Diff of the two buildlogs: -- --- b1/build.log 2025-09-09 13:33:42.628297635 +0000 +++ b2/build.log 2025-09-09 13:40:38.448812096 +0000 @@ -1,6 +1,6 @@ I: pbuilder: network access will be disabled during build -I: Current time: Tue Sep 9 01:24:07 -12 2025 -I: pbuilder-time-stamp: 1757424247 +I: Current time: Tue Oct 13 09:56:44 +14 2026 +I: pbuilder-time-stamp: 1791835004 I: Building the build Environment I: extracting base tarball [/var/cache/pbuilder/forky-reproducible-base.tgz] I: copying local configuration @@ -25,53 +25,85 @@ dpkg-source: info: applying 02_parse_h_dependency I: using fakeroot in build. I: Installing the build-deps -I: user script /srv/workspace/pbuilder/3563770/tmp/hooks/D02_print_environment starting +I: user script /srv/workspace/pbuilder/629641/tmp/hooks/D01_modify_environment starting +debug: Running on ionos5-amd64. +I: Changing host+domainname to test build reproducibility +I: Adding a custom variable just for the fun of it... +I: Changing /bin/sh to bash +'/bin/sh' -> '/bin/bash' +lrwxrwxrwx 1 root root 9 Oct 12 19:56 /bin/sh -> /bin/bash +I: Setting pbuilder2's login shell to /bin/bash +I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other +I: user script /srv/workspace/pbuilder/629641/tmp/hooks/D01_modify_environment finished +I: user script /srv/workspace/pbuilder/629641/tmp/hooks/D02_print_environment starting I: set - BUILDDIR='/build/reproducible-path' - BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other' - BUILDUSERNAME='pbuilder1' - BUILD_ARCH='amd64' - DEBIAN_FRONTEND='noninteractive' - DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=40 ' - DISTRIBUTION='forky' - HOME='/root' - HOST_ARCH='amd64' + BASH=/bin/sh + BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath + BASH_ALIASES=() + BASH_ARGC=() + BASH_ARGV=() + BASH_CMDS=() + BASH_LINENO=([0]="12" [1]="0") + BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:. 
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment") + BASH_VERSINFO=([0]="5" [1]="3" [2]="3" [3]="1" [4]="release" [5]="x86_64-pc-linux-gnu") + BASH_VERSION='5.3.3(1)-release' + BUILDDIR=/build/reproducible-path + BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other' + BUILDUSERNAME=pbuilder2 + BUILD_ARCH=amd64 + DEBIAN_FRONTEND=noninteractive + DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=42 ' + DIRSTACK=() + DISTRIBUTION=forky + EUID=0 + FUNCNAME=([0]="Echo" [1]="main") + GROUPS=() + HOME=/root + HOSTNAME=i-capture-the-hostname + HOSTTYPE=x86_64 + HOST_ARCH=amd64 IFS=' ' - INVOCATION_ID='7c2ff11cb93244eba1cb6d02cd37dc9d' - LANG='C' - LANGUAGE='en_US:en' - LC_ALL='C' - MAIL='/var/mail/root' - OPTIND='1' - PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games' - PBCURRENTCOMMANDLINEOPERATION='build' - PBUILDER_OPERATION='build' - PBUILDER_PKGDATADIR='/usr/share/pbuilder' - PBUILDER_PKGLIBDIR='/usr/lib/pbuilder' - PBUILDER_SYSCONFDIR='/etc' - PPID='3563770' - PS1='# ' - PS2='> ' + INVOCATION_ID=a27ec4ddcb5143d19c6d62ef86d1811b + LANG=C + LANGUAGE=et_EE:et + LC_ALL=C + MACHTYPE=x86_64-pc-linux-gnu + MAIL=/var/mail/root + OPTERR=1 + OPTIND=1 + OSTYPE=linux-gnu + PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path + PBCURRENTCOMMANDLINEOPERATION=build + PBUILDER_OPERATION=build + PBUILDER_PKGDATADIR=/usr/share/pbuilder + PBUILDER_PKGLIBDIR=/usr/lib/pbuilder + PBUILDER_SYSCONFDIR=/etc + PIPESTATUS=([0]="0") + POSIXLY_CORRECT=y + PPID=629641 PS4='+ ' - PWD='/' - SHELL='/bin/bash' - SHLVL='2' - SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.DmuXW2zl/pbuilderrc_1729 --distribution forky --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/forky-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.DmuXW2zl/b1 --logfile b1/build.log bison_3.8.2+dfsg-1.dsc' - SUDO_GID='110' - SUDO_HOME='/var/lib/jenkins' - SUDO_UID='105' - SUDO_USER='jenkins' - TERM='unknown' - TZ='/usr/share/zoneinfo/Etc/GMT+12' - USER='root' - _='/usr/bin/systemd-run' - http_proxy='http://46.16.76.132:3128' + PWD=/ + SHELL=/bin/bash + SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix + SHLVL=3 + SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.DmuXW2zl/pbuilderrc_B5Jo --distribution forky --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/forky-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.DmuXW2zl/b2 --logfile b2/build.log bison_3.8.2+dfsg-1.dsc' + SUDO_GID=110 + SUDO_HOME=/var/lib/jenkins + SUDO_UID=105 + SUDO_USER=jenkins + TERM=unknown + TZ=/usr/share/zoneinfo/Etc/GMT-14 + UID=0 + USER=root + _='I: set' + http_proxy=http://213.165.73.152:3128 I: uname -a - Linux ionos1-amd64 6.12.43+deb13-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.12.43-1 (2025-08-27) x86_64 GNU/Linux + Linux i-capture-the-hostname 6.12.43+deb13-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.12.43-1 (2025-08-27) x86_64 GNU/Linux I: ls -l /bin - lrwxrwxrwx 1 root root 7 Aug 10 12:30 /bin -> usr/bin -I: user script /srv/workspace/pbuilder/3563770/tmp/hooks/D02_print_environment finished + lrwxrwxrwx 1 root root 7 Aug 10 2025 /bin -> usr/bin +I: user 
script /srv/workspace/pbuilder/629641/tmp/hooks/D02_print_environment finished -> Attempting to satisfy build-dependencies -> Creating pbuilder-satisfydepends-dummy package Package: pbuilder-satisfydepends-dummy @@ -147,7 +179,7 @@ Get: 30 http://deb.debian.org/debian forky/main amd64 po-debconf all 1.0.21+nmu1 [248 kB] Get: 31 http://deb.debian.org/debian forky/main amd64 debhelper all 13.26 [939 kB] Get: 32 http://deb.debian.org/debian forky/main amd64 help2man amd64 1.49.3 [198 kB] -Fetched 11.9 MB in 0s (58.2 MB/s) +Fetched 11.9 MB in 0s (73.9 MB/s) Preconfiguring packages ... Selecting previously unselected package m4. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19898 files and directories currently installed.) @@ -295,7 +327,11 @@ fakeroot is already the newest version (1.37.1.2-1). 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded. I: Building the package -I: Running cd /build/reproducible-path/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../bison_3.8.2+dfsg-1_source.changes +I: user script /srv/workspace/pbuilder/629641/tmp/hooks/A99_set_merged_usr starting +Not re-configuring usrmerge for forky +I: user script /srv/workspace/pbuilder/629641/tmp/hooks/A99_set_merged_usr finished +hostname: Name or service not known +I: Running cd /build/reproducible-path/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../bison_3.8.2+dfsg-1_source.changes dpkg-buildpackage: info: source package bison dpkg-buildpackage: info: source version 2:3.8.2+dfsg-1 dpkg-buildpackage: info: source distribution unstable @@ -906,7 +942,7 @@ /build/reproducible-path/bison-3.8.2+dfsg/src/getargs.c make[1]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' dh_auto_build - make -j40 + make -j42 make[1]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' rm -f examples/c/reccalc/scan.stamp examples/c/reccalc/scan.stamp.tmp rm -f lib/alloca.h-t lib/alloca.h && \ @@ -976,6 +1012,7 @@ < ./lib/fcntl.in.h; \ } > lib/fcntl.h-t && \ mv lib/fcntl.h-t lib/fcntl.h +touch examples/c/reccalc/scan.stamp.tmp rm -f lib/iconv.h-t lib/iconv.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -994,6 +1031,7 @@ < ./lib/iconv.in.h; \ } > lib/iconv.h-t && \ mv lib/iconv.h-t lib/iconv.h +flex -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l rm -f lib/inttypes.h-t lib/inttypes.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! 
*/'; \ sed -e 's/@''HAVE_INTTYPES_H''@/1/g' \ @@ -1024,13 +1062,11 @@ < ./lib/inttypes.in.h; \ } > lib/inttypes.h-t && \ mv lib/inttypes.h-t lib/inttypes.h -touch examples/c/reccalc/scan.stamp.tmp rm -f lib/textstyle.h-t lib/textstyle.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ cat ./lib/textstyle.in.h; \ } > lib/textstyle.h-t && \ mv lib/textstyle.h-t lib/textstyle.h -flex -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l rm -f lib/limits.h-t lib/limits.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1380,7 +1416,6 @@ < ./lib/sched.in.h; \ } > lib/sched.h-t && \ mv lib/sched.h-t lib/sched.h -mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp /usr/bin/mkdir -p lib/malloc rm -f lib/signal.h-t lib/signal.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ @@ -1822,6 +1857,7 @@ /usr/bin/mkdir -p lib/sys /usr/bin/mkdir -p lib/sys /usr/bin/mkdir -p lib/sys +mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp rm -f lib/sys/ioctl.h-t lib/sys/ioctl.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1855,7 +1891,6 @@ < ./lib/sys_resource.in.h; \ } > lib/sys/resource.h-t && \ mv -f lib/sys/resource.h-t lib/sys/resource.h -/usr/bin/mkdir -p lib/sys rm -f lib/sys/stat.h-t lib/sys/stat.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1915,6 +1950,7 @@ } > lib/sys/stat.h-t && \ mv lib/sys/stat.h-t lib/sys/stat.h /usr/bin/mkdir -p lib/sys +/usr/bin/mkdir -p lib/sys rm -f lib/sys/time.h-t lib/sys/time.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1935,6 +1971,7 @@ < ./lib/sys_time.in.h; \ } > lib/sys/time.h-t && \ mv lib/sys/time.h-t lib/sys/time.h +/usr/bin/mkdir -p lib/sys rm -f lib/sys/times.h-t lib/sys/times.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1952,7 +1989,18 @@ } > lib/sys/times.h-t && \ mv lib/sys/times.h-t lib/sys/times.h /usr/bin/mkdir -p lib/sys -/usr/bin/mkdir -p lib/sys +rm -f lib/sys/types.h-t lib/sys/types.h && \ +{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ + sed -e 's|@''GUARD_PREFIX''@|GL|g' \ + -e 's|@''INCLUDE_NEXT''@|include_next|g' \ + -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ + -e 's|@''PRAGMA_COLUMNS''@||g' \ + -e 's|@''NEXT_SYS_TYPES_H''@||g' \ + -e 's|@''WINDOWS_64_BIT_OFF_T''@|0|g' \ + -e 's|@''WINDOWS_STAT_INODES''@|0|g' \ + < ./lib/sys_types.in.h; \ +} > lib/sys/types.h-t && \ +mv lib/sys/types.h-t lib/sys/types.h rm -f lib/termios.h-t lib/termios.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2026,18 +2074,6 @@ < ./lib/time.in.h; \ } > lib/time.h-t && \ mv lib/time.h-t lib/time.h -rm -f lib/sys/types.h-t lib/sys/types.h && \ -{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ - sed -e 's|@''GUARD_PREFIX''@|GL|g' \ - -e 's|@''INCLUDE_NEXT''@|include_next|g' \ - -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ - -e 's|@''PRAGMA_COLUMNS''@||g' \ - -e 's|@''NEXT_SYS_TYPES_H''@||g' \ - -e 's|@''WINDOWS_64_BIT_OFF_T''@|0|g' \ - -e 's|@''WINDOWS_STAT_INODES''@|0|g' \ - < ./lib/sys_types.in.h; \ -} > lib/sys/types.h-t && \ -mv lib/sys/types.h-t lib/sys/types.h rm -f lib/unistd.h-t lib/unistd.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! 
*/'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2539,15 +2575,25 @@ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-cloexec.o `test -f 'lib/cloexec.c' || echo './'`lib/cloexec.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-close-stream.o `test -f 'lib/close-stream.c' || echo './'`lib/close-stream.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-closeout.o `test -f 'lib/closeout.c' || echo './'`lib/closeout.c +lib/careadlinkat.c: In function 'careadlinkat': +lib/careadlinkat.c:178:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] + 178 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." + | ^~~~~~~ +lib/careadlinkat.c:179:5: warning: #warning "See ." [-Wcpp] + 179 | #warning "See ." + | ^~~~~~~ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-concat-filename.o `test -f 'lib/concat-filename.c' || echo './'`lib/concat-filename.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-dirname.o `test -f 'lib/dirname.c' || echo './'`lib/dirname.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-basename.o `test -f 'lib/basename.c' || echo './'`lib/basename.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-dirname-lgpl.o `test -f 'lib/dirname-lgpl.c' || echo './'`lib/dirname-lgpl.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-stripslash.o `test -f 'lib/stripslash.c' || echo './'`lib/stripslash.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-execute.o `test -f 'lib/execute.c' || echo './'`lib/execute.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-exitfail.o `test -f 'lib/exitfail.c' || echo './'`lib/exitfail.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-fatal-signal.o `test -f 'lib/fatal-signal.c' || echo './'`lib/fatal-signal.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-fd-safer-flag.o `test -f 'lib/fd-safer-flag.c' || echo './'`lib/fd-safer-flag.c +lib/careadlinkat.c:182:10: warning: function may return address of local variable [-Wreturn-local-addr] + 182 | return readlink_stk (fd, filename, buffer, buffer_size, alloc, + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + 183 | preadlinkat, stack_buf); + | ~~~~~~~~~~~~~~~~~~~~~~~ +lib/careadlinkat.c:181:8: note: declared here + 181 | char stack_buf[STACK_BUF_SIZE]; + | ^~~~~~~~~ lib/canonicalize.c: In function 'canonicalize_filename_mode': lib/canonicalize.c:484:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] 484 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." @@ -2555,13 +2601,11 @@ lib/canonicalize.c:485:5: warning: #warning "See ." [-Wcpp] 485 | #warning "See ." | ^~~~~~~ -lib/careadlinkat.c: In function 'careadlinkat': -lib/careadlinkat.c:178:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] - 178 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." - | ^~~~~~~ -lib/careadlinkat.c:179:5: warning: #warning "See ." [-Wcpp] - 179 | #warning "See ." - | ^~~~~~~ +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-stripslash.o `test -f 'lib/stripslash.c' || echo './'`lib/stripslash.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. 
-I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-execute.o `test -f 'lib/execute.c' || echo './'`lib/execute.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-exitfail.o `test -f 'lib/exitfail.c' || echo './'`lib/exitfail.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-fatal-signal.o `test -f 'lib/fatal-signal.c' || echo './'`lib/fatal-signal.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-fd-safer-flag.o `test -f 'lib/fd-safer-flag.c' || echo './'`lib/fd-safer-flag.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-dup-safer-flag.o `test -f 'lib/dup-safer-flag.c' || echo './'`lib/dup-safer-flag.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-file-set.o `test -f 'lib/file-set.c' || echo './'`lib/file-set.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-findprog-in.o `test -f 'lib/findprog-in.c' || echo './'`lib/findprog-in.c @@ -2569,14 +2613,6 @@ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-fstrcmp.o `test -f 'lib/fstrcmp.c' || echo './'`lib/fstrcmp.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-gethrxtime.o `test -f 'lib/gethrxtime.c' || echo './'`lib/gethrxtime.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-xtime.o `test -f 'lib/xtime.c' || echo './'`lib/xtime.c -lib/careadlinkat.c:182:10: warning: function may return address of local variable [-Wreturn-local-addr] - 182 | return readlink_stk (fd, filename, buffer, buffer_size, alloc, - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - 183 | preadlinkat, stack_buf); - | ~~~~~~~~~~~~~~~~~~~~~~~ -lib/careadlinkat.c:181:8: note: declared here - 181 | char stack_buf[STACK_BUF_SIZE]; - | ^~~~~~~~~ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-getprogname.o `test -f 'lib/getprogname.c' || echo './'`lib/getprogname.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-gettime.o `test -f 'lib/gettime.c' || echo './'`lib/gettime.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o lib/libbison_a-hard-locale.o `test -f 'lib/hard-locale.c' || echo './'`lib/hard-locale.c @@ -2612,8 +2648,8 @@ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help -if /bin/bash '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then \ - /bin/bash '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man \ +if /bin/sh '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then \ + /bin/sh '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man \ --include=./doc/bison.x \ --output=doc/bison.1.tmp tests/bison && \ { sed 's/^\(\.TH[^"]*"[^"]*"[^"]*\)"[^"]*"/\1/' doc/bison.1 >doc/bison.1a.tmp || true; } && \ @@ -2641,7 +2677,7 @@ make[2]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' make[1]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' dh_auto_test - make -j40 check "TESTSUITEFLAGS=-j40 --verbose" VERBOSE=1 + make -j42 check "TESTSUITEFLAGS=-j42 --verbose" VERBOSE=1 make[1]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' if test -d ./.git \ && git --version >/dev/null 2>&1; then \ @@ -2678,81 +2714,81 @@ ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help make examples/c/calc/calc examples/c/glr/c++-types examples/c/lexcalc/lexcalc examples/c/mfcalc/mfcalc examples/c/pushcalc/calc examples/c/reccalc/reccalc examples/c/rpcalc/rpcalc examples/c++/calc++/calc++ examples/c++/glr/c++-types examples/c++/simple examples/c++/variant examples/c++/variant-11 ./tests/bison tests/atconfig tests/atlocal make[4]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' -/bin/bash ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h `echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h `echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex -/bin/bash ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex +/bin/sh ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines rm -f examples/c++/calc++/parser.stamp -touch examples/c++/calc++/parser.stamp.tmp \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo 
'./'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo './'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex +touch examples/c++/calc++/parser.stamp.tmp ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -o examples/c++/calc++/parser.cc examples/c++/calc++/parser.yy rm -f examples/c++/glr/c++-types.stamp -\ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines touch examples/c++/glr/c++-types.stamp.tmp +\ +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -o examples/c++/glr/c++-types.cc examples/c++/glr/c++-types.yy \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines make[4]: 'tests/bison' is up to date. make[4]: Nothing to be done for 'tests/atconfig'. make[4]: 'tests/atlocal' is up to date. +mv -f examples/c++/calc++/parser.stamp.tmp examples/c++/calc++/parser.stamp updating examples/c/calc/calc.output +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/calc++/calc__-driver.o `test -f 'examples/c++/calc++/driver.cc' || echo './'`examples/c++/calc++/driver.cc +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/calc++/calc__-scanner.o `test -f 'examples/c++/calc++/scanner.cc' || echo './'`examples/c++/calc++/scanner.cc +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/calc++/calc__-calc++.o `test -f 'examples/c++/calc++/calc++.cc' || echo './'`examples/c++/calc++/calc++.cc updating examples/c/lexcalc/parse.output +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/calc++/calc__-parser.o `test -f 'examples/c++/calc++/parser.cc' || echo './'`examples/c++/calc++/parser.cc +updating examples/c/rpcalc/rpcalc.output +updating examples/c/pushcalc/calc.output +updating examples/c/reccalc/parse.output updating examples/c/calc/calc.h gcc -DEXEEXT=\"\" -I./examples/c/calc -I./examples/c/calc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/calc/examples_c_calc_calc-calc.o `test -f 'examples/c/calc/calc.c' || echo './'`examples/c/calc/calc.c -updating examples/c/pushcalc/calc.output -updating examples/c/mfcalc/mfcalc.output +mv -f examples/c++/glr/c++-types.stamp.tmp examples/c++/glr/c++-types.stamp updating examples/c/lexcalc/parse.h -mv -f examples/c++/calc++/parser.stamp.tmp examples/c++/calc++/parser.stamp -updating examples/c/rpcalc/rpcalc.output +updating examples/c/rpcalc/rpcalc.h updating examples/c/pushcalc/calc.h gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o `test -f 'examples/c/lexcalc/parse.c' || echo './'`examples/c/lexcalc/parse.c gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o `test -f 'examples/c/lexcalc/scan.c' || echo './'`examples/c/lexcalc/scan.c -updating examples/c/glr/c++-types.output -g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/calc++/calc__-driver.o `test -f 'examples/c++/calc++/driver.cc' || echo './'`examples/c++/calc++/driver.cc -g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/calc++/calc__-scanner.o `test -f 'examples/c++/calc++/scanner.cc' || echo './'`examples/c++/calc++/scanner.cc -g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/calc++/calc__-calc++.o `test -f 'examples/c++/calc++/calc++.cc' || echo './'`examples/c++/calc++/calc++.cc -g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/calc++/calc__-parser.o `test -f 'examples/c++/calc++/parser.cc' || echo './'`examples/c++/calc++/parser.cc +gcc -DEXEEXT=\"\" -I./examples/c/rpcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o `test -f 'examples/c/rpcalc/rpcalc.c' || echo './'`examples/c/rpcalc/rpcalc.c gcc -DEXEEXT=\"\" -I./examples/c/pushcalc -I./examples/c/pushcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/pushcalc/examples_c_pushcalc_calc-calc.o `test -f 'examples/c/pushcalc/calc.c' || echo './'`examples/c/pushcalc/calc.c -updating examples/c/mfcalc/mfcalc.h -updating examples/c/reccalc/parse.output -gcc -DEXEEXT=\"\" -I./examples/c/mfcalc -I./examples/c/mfcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o `test -f 'examples/c/mfcalc/mfcalc.c' || echo './'`examples/c/mfcalc/mfcalc.c -updating examples/c/glr/c++-types.h -gcc -DEXEEXT=\"\" -I./examples/c/glr -I./examples/c/glr -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/glr/examples_c_glr_c___types-c++-types.o `test -f 'examples/c/glr/c++-types.c' || echo './'`examples/c/glr/c++-types.c -mv -f examples/c++/glr/c++-types.stamp.tmp examples/c++/glr/c++-types.stamp -updating examples/c++/simple.output -updating examples/c/rpcalc/rpcalc.h +g++ -DEXEEXT=\"\" -I./examples/c++/glr -I./examples/c++/glr -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++14 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/glr/examples_c___glr_c___types-c++-types.o `test -f 'examples/c++/glr/c++-types.cc' || echo './'`examples/c++/glr/c++-types.cc updating examples/c/reccalc/parse.h -gcc -DEXEEXT=\"\" -I./examples/c/rpcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o `test -f 'examples/c/rpcalc/rpcalc.c' || echo './'`examples/c/rpcalc/rpcalc.c -updating examples/c++/variant-11.output +updating examples/c++/simple.output +updating examples/c/glr/c++-types.output gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/reccalc/examples_c_reccalc_reccalc-parse.o `test -f 'examples/c/reccalc/parse.c' || echo './'`examples/c/reccalc/parse.c gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o `test -f 'examples/c/reccalc/scan.c' || echo './'`examples/c/reccalc/scan.c -g++ -DEXEEXT=\"\" -I./examples/c++/glr -I./examples/c++/glr -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++14 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/glr/examples_c___glr_c___types-c++-types.o `test -f 'examples/c++/glr/c++-types.cc' || echo './'`examples/c++/glr/c++-types.cc +updating examples/c++/variant.output +updating examples/c/mfcalc/mfcalc.output updating examples/c++/simple.hh -updating examples/c++/variant-11.hh +updating examples/c/glr/c++-types.h g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/simple-simple.o `test -f 'examples/c++/simple.cc' || echo './'`examples/c++/simple.cc -updating examples/c++/variant.output -g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/variant_11-variant-11.o `test -f 'examples/c++/variant-11.cc' || echo './'`examples/c++/variant-11.cc +updating examples/c++/variant-11.output updating examples/c++/variant.hh +gcc -DEXEEXT=\"\" -I./examples/c/glr -I./examples/c/glr -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/glr/examples_c_glr_c___types-c++-types.o `test -f 'examples/c/glr/c++-types.c' || echo './'`examples/c/glr/c++-types.c g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/variant-variant.o `test -f 'examples/c++/variant.cc' || echo './'`examples/c++/variant.cc +updating examples/c/mfcalc/mfcalc.h +gcc -DEXEEXT=\"\" -I./examples/c/mfcalc -I./examples/c/mfcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o `test -f 'examples/c/mfcalc/mfcalc.c' || echo './'`examples/c/mfcalc/mfcalc.c +updating examples/c++/variant-11.hh +g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -c -o examples/c++/variant_11-variant-11.o `test -f 'examples/c++/variant-11.cc' || echo './'`examples/c++/variant-11.cc gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c/rpcalc/rpcalc examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o -lm gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c/pushcalc/calc examples/c/pushcalc/examples_c_pushcalc_calc-calc.o -gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c/mfcalc/mfcalc examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o -lm gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c/calc/calc examples/c/calc/examples_c_calc_calc-calc.o +gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c/mfcalc/mfcalc examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o -lm gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c/lexcalc/lexcalc examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c/reccalc/reccalc examples/c/reccalc/examples_c_reccalc_reccalc-parse.o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c/glr/c++-types examples/c/glr/examples_c_glr_c___types-c++-types.o -g++ -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c++/variant examples/c++/variant-variant.o g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c++/simple examples/c++/simple-simple.o g++ -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c++/calc++/calc++ examples/c++/calc++/calc__-driver.o examples/c++/calc++/calc__-scanner.o examples/c++/calc++/calc__-calc++.o examples/c++/calc++/calc__-parser.o +g++ -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c++/variant examples/c++/variant-variant.o g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c++/variant-11 examples/c++/variant_11-variant-11.o g++ -std=c++14 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -fstack-clash-protection -Wformat -Werror=format-security -fcf-protection -Wl,-z,relro -Wl,-z,now -o examples/c++/glr/c++-types examples/c++/glr/examples_c___glr_c___types-c++-types.o make[4]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' @@ -2770,7 +2806,7 @@ } >tests/package.m4.tmp mv tests/package.m4.tmp tests/package.m4 \ - /bin/bash '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp + /bin/sh '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp make[5]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' make[5]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' Making all in po @@ -2788,25 +2824,25 @@ Making all in . make[6]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' PASS: examples/c/glr/c++-types.test -PASS: examples/c++/simple.test PASS: examples/c++/variant.test -PASS: examples/c++/variant-11.test +PASS: examples/c++/simple.test /usr/bin/mkdir -p doc LC_ALL=C tests/bison --version >doc/bison.help.tmp LC_ALL=C tests/bison --help | \ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help -PASS: examples/c++/glr/c++-types.test make[6]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' make[5]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' +PASS: examples/c++/variant-11.test +PASS: examples/c++/glr/c++-types.test PASS: examples/c/mfcalc/mfcalc.test -PASS: examples/c/calc/calc.test PASS: examples/c/pushcalc/calc.test +PASS: examples/c/calc/calc.test PASS: examples/c/rpcalc/rpcalc.test PASS: examples/c/lexcalc/lexcalc.test -PASS: examples/c++/calc++/calc++.test PASS: examples/c/reccalc/reccalc.test +PASS: examples/c++/calc++/calc++.test ============================================================================ Testsuite summary for GNU Bison 3.8.2 ============================================================================ @@ -2821,7 +2857,7 @@ make[5]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' "/usr/bin/perl" -pi -e 's/\@tb\@/\t/g' tests/testsuite.tmp mv tests/testsuite.tmp tests/testsuite -/bin/bash ./tests/testsuite -C tests -j40 --verbose +/bin/sh ./tests/testsuite -C tests -j42 --verbose ## --------------------------- ## ## GNU Bison 3.8.2 test suite. ## ## --------------------------- ## @@ -2838,43 +2874,58 @@ -1. m4.at:21: testing Generating Comments ... - -./m4.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -S ./input.m4 input.y -5. input.at:147: testing Invalid inputs with {} ... +1. m4.at:21: testing Generating Comments ... +2. input.at:27: testing Invalid number of arguments ... +./input.at:29: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret +3. input.at:58: testing Invalid options ... 4. input.at:83: testing Invalid inputs ... -./input.at:97: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' input.y || exit 77 -3. input.at:58: testing Invalid options ... 
-./input.at:67: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -ferror=caret input.y +5. input.at:147: testing Invalid inputs with {} ... +./input.at:97: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' input.y || exit 77 ./input.at:162: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - - 7. input.at:204: testing Yacc warnings ... ./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y +6. input.at:173: testing Yacc warnings on symbols ... +./input.at:67: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -ferror=caret input.y +./m4.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -S ./input.m4 input.y -12. input.at:427: testing Symbol declarations ... -stderr: +9. input.at:287: testing Invalid symbol declarations ... +11. input.at:401: testing Dangling aliases ... 10. input.at:341: testing Redefining the error token ... -2. input.at:27: testing Invalid number of arguments ... -./input.at:29: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret +8. input.at:238: testing Yacc's %type ... +./input.at:304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wdangling input.y +14. input.at:552: testing Type Clashes ... +./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y 2.y +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y +./input.at:565: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y -9. input.at:287: testing Invalid symbol declarations ... -./input.at:304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +13. input.at:528: testing Invalid $n and @n ... -8. 
input.at:238: testing Yacc's %type ... -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y -./input.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:536: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +12. input.at:427: testing Symbol declarations ... + +stderr: + +18. input.at:832: testing EOF redeclared ... +./input.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S./dump-symbols.m4 input.y +15. input.at:774: testing Unused values ... +16. input.at:784: testing Unused values before symbol declarations ... +19. input.at:859: testing Symbol class redefinition ... +./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y bison: invalid argument 'error=caret' for '--feature' Valid arguments are: - 'none' @@ -2882,271 +2933,242 @@ - 'fixit', 'diagnostics-parseable-fixits' - 'syntax-only' - 'all' +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./input.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=error=itemsets input.y -./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -6. input.at:173: testing Yacc warnings on symbols ... -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y -13. input.at:528: testing Invalid $n and @n ... - -./input.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S./dump-symbols.m4 input.y - - -16. input.at:784: testing Unused values before symbol declarations ... 17. input.at:794: testing Symbol redeclared ... +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +22. input.at:1013: testing Undefined symbols ... +21. input.at:970: testing Per-type %printer and %destructor redeclared ... -19. input.at:859: testing Symbol class redefinition ... -./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y 5. 
input.at:147: ok +./input.at:1023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:987: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +9. input.at:287: +23. input.at:1045: testing Unassociated types used for a printer or destructor ... +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +20. input.at:899: testing Default %printer and %destructor redeclared ... + ok +25. input.at:1139: testing Unused values with default %destructor ... +26. input.at:1187: testing Unused values with per-type %destructor ... -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -14. input.at:552: testing Type Clashes ... -./input.at:565: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - +24. input.at:1074: testing Useless printers or destructors ... +13. input.at:528: ok +29. input.at:1275: testing Incompatible Aliases ... +14. input.at:552: ./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y + ok -18. input.at:832: testing EOF redeclared ... -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -25. input.at:1139: testing Unused values with default %destructor ... -9. input.at:287: ok -15. input.at:774: testing Unused values ... +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y 27. input.at:1219: testing Duplicate string ... -./input.at:536: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -21. input.at:970: testing Per-type %printer and %destructor redeclared ... -20. input.at:899: testing Default %printer and %destructor redeclared ... -11. input.at:401: testing Dangling aliases ... -stderr: -28. input.at:1247: testing Token collisions ... -bison: invalid argument 'error=itemsets' for '--report' -Valid arguments are: - - 'none' - - 'states' - - 'itemsets' - - 'lookaheads' - - 'solved' - - 'counterexamples', 'cex' - - 'all' -./input.at:72: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror?all input.y -stderr: -30. input.at:1400: testing Torturing the Scanner ... -23. input.at:1045: testing Unassociated types used for a printer or destructor ... 
-./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wdangling input.y ./m4.at:55: cat output.txt -./input.at:959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -22. input.at:1013: testing Undefined symbols ... -./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y + +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y + +./input.at:959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y + +19. input.at:859: + ok + +22. input.at:1013: ok + +31. input.at:1569: testing Typed symbol aliases ... +30. input.at:1400: testing Torturing the Scanner ... ./input.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -24. input.at:1074: testing Useless printers or destructors ... -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -29. input.at:1275: testing Incompatible Aliases ... -./input.at:1023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -35. input.at:1619: testing String aliases for character tokens ... -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -input.y:1.11: error: invalid null character - 1 | %header "ð€ˆ" - | ^ -input.y:2.1-2: error: invalid characters: '\0\001\002\377?' - 2 | ÿ? - | ^~ -input.y:3.2: error: invalid null character - 3 | "" - | ^ -input.y:5.1: error: invalid character: '?' - 5 | ? 
- | ^ -input.y:6.14: error: invalid character: '}' - 6 | default: 'a' } - | ^ -input.y:7.1: error: invalid character: '%' - 7 | %& - | ^ -input.y:7.2: error: invalid character: '&' - 7 | %& - | ^ -input.y:8.1-17: error: invalid directive: '%a-does-not-exist' - 8 | %a-does-not-exist - | ^~~~~~~~~~~~~~~~~ -input.y:9.1: error: invalid character: '%' - 9 | %- - | ^ -input.y:9.2: error: invalid character: '-' - 9 | %- - | ^ -input.y:10.1-11.0: error: missing '%}' at end of file - 10 | %{ - | ^~ -./input.at:987: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y 2.y -26. input.at:1187: testing Unused values with per-type %destructor ... -./input.at:104: "$PERL" -p -e 's{([\0\200\210\360\377])}{sprintf "\\x%02x", ord($1)}ge' stderr +./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +28. input.at:1247: testing Token collisions ... +./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y 33. input.at:1610: testing Require 3.8.2 ... -31. input.at:1569: testing Typed symbol aliases ... -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +32. input.at:1609: testing Require 1.0 ... +./input.at:1609: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +21. input.at:970: ok + ./input.at:1610: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +35. input.at:1619: testing String aliases for character tokens ... ./input.at:1632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 34. input.at:1612: testing Require 100.0 ... -36. input.at:1642: testing Symbols ... -39. input.at:1805: testing %start after first rule ... +41. input.at:1895: testing %prec takes a token ... 37. input.at:1708: testing Numbered tokens ... -./input.at:1720: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret redecl.y -13. input.at:528: ./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - ok -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -40. input.at:1826: testing Duplicate %start symbol ... 
-./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y 38. input.at:1750: testing Unclosed constructs ... -./input.at:1779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -./input.at:1817: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -32. input.at:1609: testing Require 1.0 ... -./input.at:1609: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./input.at:1612: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -bison: invalid argument 'error?all' for '--warning' -Valid arguments are: - - 'all' - - 'conflicts-rr' - - 'conflicts-sr' - - 'counterexamples', 'cex' - - 'dangling-alias' - - 'deprecated' - - 'empty-rule' - - 'everything' - - 'midrule-values' - - 'none' - - 'other' - - 'precedence' - - 'yacc' -21. input.at:970: ok -stderr: -19. input.at:859: ok -4. input.at:83: ok -22. input.at:1013: input.y:9.10-16: error: require bison 100.0, but have 3.8.2 - ok -./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -3. input.at:58: ok +./input.at:1779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y +36. input.at:1642: testing Symbols ... ./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc input.y -34. input.at:1612: 28. input.at:1247: ./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --skeleton - ok - ok - -1. m4.at:21: ok -./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -14. input.at:552: ok -stderr: -./input.at:468: cat symbols.csv -41. input.at:1895: testing %prec takes a token ... -./input.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y +39. input.at:1805: testing %start after first rule ... 
./input.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1720: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret redecl.y +./input.at:1817: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -20. input.at:899: ok -bison: option '--skeleton' requires an argument -Try 'bison --help' for more information. -./input.at:43: sed -e \ - "s/requires an argument -- skeleton/'--skeleton' requires an argument/" \ - stderr - +28. input.at:1247: ok 42. input.at:1916: testing %prec's token must be defined ... - - -12. input.at:427: 38. input.at:1750: ok - ok - - +40. input.at:1826: testing Duplicate %start symbol ... +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +stderr: +input.y:9.10-16: error: require bison 100.0, but have 3.8.2 ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y - -./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y 43. input.at:1936: testing Reject unused %code qualifiers ... -2. input.at:27: ok +34. input.at:1612: ok + +./input.at:468: cat symbols.csv +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Werror +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror +45. input.at:2065: testing errors ... +38. input.at:1750: ok ./input.at:1946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c.y + + +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror 44. input.at:2025: testing Multiple %code ... -48. input.at:2191: testing "%define" code variables ... ./input.at:2054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +41. 
input.at:1895: ./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-redefined.y +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror + ok +stderr: + 47. input.at:2170: testing "%define" Boolean variables ... -46. input.at:2102: testing %define, --define, --force-define ... +32. input.at:1609: ok +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror 49. input.at:2224: testing "%define" keyword variables ... -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy -./input.at:1327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - - +48. input.at:2191: testing "%define" code variables ... ./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:2180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret Input.y +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -41. input.at:1895: ./input.at:2118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dvar-dd=cmd-d1 -Dvar-dd=cmd-d2 \ +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +stderr: +46. input.at:2102: testing %define, --define, --force-define ... 
+./input.at:2118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dvar-dd=cmd-d1 -Dvar-dd=cmd-d2 \ -Fvar-ff=cmd-f1 -Fvar-ff=cmd-f2 \ +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror + -Dvar-dfg=cmd-d -Fvar-dfg=cmd-f \ -Fvar-fd=cmd-f -Dvar-fd=cmd-d \ --skeleton ./skel.c input.y - ok -45. input.at:2065: testing errors ... -./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-redefined.y -./input.at:2180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret Input.y - - +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Werror 31. input.at:1569: ok -35. input.at:1619: ./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror - ok -stderr: -52. input.at:2342: testing "%define" backward compatibility ... -stderr: +./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-unused.y +33. input.at:1610: ok + 50. input.at:2257: testing "%define" enum variables ... ./input.at:2269: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:2355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror -32. input.at:1609: ok -53. input.at:2393: testing Unused api.pure ... -39. input.at:1805: ok -55. input.at:2482: testing Bad character literals ... -./input.at:2484: -set x `LC_ALL=C ls -l 'empty.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='empty.y'; } || exit 77 -33. 
input.at:1610: ./input.at:390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - ok +35. input.at:1619: ok +52. input.at:2342: testing "%define" backward compatibility ... 51. input.at:2320: testing "%define" file variables ... ./input.at:2329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +39. input.at:1805: +./input.at:2355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +53. input.at:2393: testing Unused api.pure ... +./input.at:2413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y + ok +./input.at:1735: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret too-large.y +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror 54. input.at:2429: testing C++ namespace reference errors ... 
./input.at:2450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Werror -./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-unused.y -./input.at:2413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./input.at:2123: cat input.tab.c -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Werror -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Werror +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +55. input.at:2482: testing Bad character literals ... +./input.at:2484: +set x `LC_ALL=C ls -l 'empty.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='empty.y'; } || exit 77 +56. input.at:2543: testing Bad escapes in literals ... -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Werror +./input.at:2556: "$PERL" -e 'print "start: \"\\\t\\\f\\\0\\\1\" ;";' >> input.y || exit 77 +52. input.at:2342: ok +57. input.at:2582: testing Unexpected end of file ... 
+./input.at:2586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:2558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: -./input.at:1359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:2591: +set x `LC_ALL=C ls -l 'char.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='char.y'; } || exit 77 0+0 records in 0+0 records out -0 bytes copied, 6.1155e-05 s, 0.0 kB/s -52. input.at:2342: ok +0 bytes copied, 4.6998e-05 s, 0.0 kB/s stdout: -./input.at:2303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dvar=cmd-d input-dg.y +58. input.at:2675: testing LAC: Errors for %define ... +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=none input.y +47. input.at:2170: ok +./input.at:2490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret empty.y -56. input.at:2543: testing Bad escapes in literals ... -./input.at:1735: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret too-large.y +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:1960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c-glr.y +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror +51. input.at:2320: ok + +59. input.at:2719: testing -Werror combinations ... +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y + +60. input.at:2764: testing %name-prefix and api.prefix are incompatible ... 
+./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-deprecated input.y +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +61. input.at:2793: testing Redefined %union name ... +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y +1. m4.at:21: ok +stderr: +12. input.at:427: bison: invalid argument 'error=itemsets' for '--report' +Valid arguments are: + - 'none' + - 'states' + - 'itemsets' + - 'lookaheads' + - 'solved' + - 'counterexamples', 'cex' + - 'all' + ok +./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --skeleton +./input.at:72: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror?all input.y + + +62. input.at:2840: testing Stray $ or @ ... 
+./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y +stderr: +input.y:1.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] + 1 | %nterm exp + | ^~~~~~ +input.y:2.12-15: error: POSIX Yacc does not support hexadecimal literals [-Werror=yacc] + 2 | %token NUM 0x40 "number" + | ^~~~ +input.y:2.17-24: error: POSIX Yacc does not support string literals [-Werror=yacc] + 2 | %token NUM 0x40 "number" + | ^~~~~~~~ +input.y:4.6-13: error: POSIX Yacc does not support string literals [-Werror=yacc] + 4 | exp: "number"; + | ^~~~~~~~ +stderr: +63. input.at:2883: testing Code injection ... +input.y:2.13-17: error: string literal "bar" not attached to a symbol [-Werror=dangling-alias] + 2 | %type "bar" + | ^~~~~ +input.y:4.19-23: error: string literal "baz" not attached to a symbol [-Werror=dangling-alias] + 4 | expr: "foo" "bar" "baz" + | ^~~~~ +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y stderr: stderr: -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror input.y:1.1-11: error: POSIX Yacc does not support %destructor [-Werror=yacc] 1 | %destructor {} | ^~~~~~~~~~~ @@ -3159,21 +3181,34 @@ input.y:7.4-9: error: POSIX Yacc does not support %empty [-Werror=yacc] 7 | b: %empty { $$ = 42; }; | ^~~~~~ -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -input.y:1.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] - 1 | %nterm exp - | ^~~~~~ -input.y:2.12-15: error: POSIX Yacc does not support hexadecimal literals [-Werror=yacc] - 2 | %token NUM 0x40 "number" - | ^~~~ -input.y:2.17-24: error: POSIX Yacc does not support string literals [-Werror=yacc] - 2 | %token NUM 0x40 "number" - | ^~~~~~~~ -input.y:4.6-13: error: POSIX Yacc does not support string literals [-Werror=yacc] - 4 | exp: "number"; - | ^~~~~~~~ +input.y:1.16-18: error: symbol FOO redeclared [-Werror=other] + 1 | %token FOO BAR FOO 0 + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %token FOO BAR FOO 0 + | ^~~ +./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:2054: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dvar=cmd-d input-dg.y +stderr: +37. 
input.at:1708: ok +0+0 records in +0+0 records out +0 bytes copied, 3.9972e-05 s, 0.0 kB/s +stdout: +56. input.at:2543: ok +./input.at:2594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret char.y +./input.at:2508: +set x `LC_ALL=C ls -l 'two.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='two.y'; } || exit 77 + +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y + +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror stderr: -./input.at:2556: "$PERL" -e 'print "start: \"\\\t\\\f\\\0\\\1\" ;";' >> input.y || exit 77 input.y:1.12-14: error: duplicate directive [-Werror=other] 1 | %start exp exp exp | ^~~ @@ -3187,12 +3222,7 @@ 1 | %start exp exp exp | ^~~ input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./input.at:2490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret empty.y -57. input.at:2582: testing Unexpected end of file ... -./input.at:2586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y stderr: - -./input.at:2558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y input.y:6.8-45: error: unset value: $$ [-Werror=other] 6 | start: end end tagged tagged { $1; $3; } ; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3202,110 +3232,8 @@ input.y:7.6-8: error: unset value: $$ [-Werror=other] 7 | end: { } ; | ^~~ - -58. input.at:2675: testing LAC: Errors for %define ... -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=none input.y -47. input.at:2170: ok -./input.at:216: sed 's,.*/$,,' stderr 1>&2 -./input.at:2591: -set x `LC_ALL=C ls -l 'char.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='char.y'; } || exit 77 -./input.at:1555: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -59. input.at:2719: testing -Werror combinations ... -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y -./input.at:2054: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -./input.at:1152: sed 's,.*/$,,' stderr 1>&2 -./input.at:1836: sed 's,.*/$,,' stderr 1>&2 -stderr: -./input.at:182: sed 's,.*/$,,' stderr 1>&2 -60. input.at:2764: testing %name-prefix and api.prefix are incompatible ... 
-./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error -./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y -./input.at:1960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c-glr.y -56. input.at:2543: ok -./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-deprecated input.y -input.y:1.16-18: error: symbol FOO redeclared [-Werror=other] - 1 | %token FOO BAR FOO 0 - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %token FOO BAR FOO 0 - | ^~~ -stderr: -./input.at:2508: -set x `LC_ALL=C ls -l 'two.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='two.y'; } || exit 77 -62. input.at:2840: testing Stray $ or @ ... -61. input.at:2793: testing Redefined %union name ... -input.y:1.12-14: error: symbol FOO redeclared [-Werror=other] - 1 | %token FOO FOO - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %token FOO FOO - | ^~~ -input.y:2.15-17: error: symbol BAR redeclared [-Werror=other] - 2 | %token BAR 12 BAR 12 - | ^~~ -input.y:2.8-10: note: previous declaration - 2 | %token BAR 12 BAR 12 - | ^~~ -input.y:3.14-16: error: symbol EOF redeclared [-Werror=other] - 3 | %token EOF 0 EOF 0 - | ^~~ -input.y:3.8-10: note: previous declaration - 3 | %token EOF 0 EOF 0 - | ^~~ -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -stderr: -0+0 records in -0+0 records out -0 bytes copied, 4.8183e-05 s, 0.0 kB/s -37. input.at:1708: ok -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y -45. 
input.at:2065: ok -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -stdout: -./input.at:2594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret char.y -./input.at:843: sed 's,.*/$,,' stderr 1>&2 -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error - -stderr: - -29. input.at:1275: ok -./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y -63. input.at:2883: testing Code injection ... -input.y:6.8-22: error: unset value: $$ [-Werror=other] - 6 | start: end end { $1; } ; - | ^~~~~~~~~~~~~~~ -input.y:6.12-14: error: unused value: $2 [-Werror=other] - 6 | start: end end { $1; } ; - | ^~~ -input.y:7.6-8: error: unset value: $$ [-Werror=other] - 7 | end: { } ; - | ^~~ -stderr: -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y -./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -0+0 records in -0+0 records out -0 bytes copied, 4.6381e-05 s, 0.0 kB/s -stdout: -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror -./input.at:804: sed 's,.*/$,,' stderr 1>&2 -51. input.at:2320: ./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error - ok -stderr: -stderr: -./input.at:2514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret two.y stderr: stderr: -input.y:4.22-28: error: type is used, but is not associated to any symbol [-Werror=other] -input.y:5.25-31: error: type is used, but is not associated to any symbol [-Werror=other] input.y:2.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] 2 | %nterm nterm1 | ^~~~~~ @@ -3330,62 +3258,50 @@ input.y:10.9-16: error: POSIX Yacc does not support string literals [-Werror=yacc] 10 | nterm3: "TOKEN3" | ^~~~~~~~ -input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other] -./input.at:2604: -set x `LC_ALL=C ls -l 'escape-in-char.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-char.y'; } || exit 77 -64. input.at:2946: testing Deprecated directives ... 65. input.at:3077: testing Unput's effect on locations ... 
-input.y:2.13-17: error: string literal "bar" not attached to a symbol [-Werror=dangling-alias] - 2 | %type "bar" - | ^~~~~ -input.y:4.19-23: error: string literal "baz" not attached to a symbol [-Werror=dangling-alias] - 4 | expr: "foo" "bar" "baz" - | ^~~~~ -./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - - -./input.at:3019: cp errors-all experr -./input.at:1199: sed 's,.*/$,,' stderr 1>&2 -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y +input.y:4.22-28: error: type is used, but is not associated to any symbol [-Werror=other] +input.y:5.25-31: error: type is used, but is not associated to any symbol [-Werror=other] stderr: -./input.at:1062: sed 's,.*/$,,' stderr 1>&2 -./input.at:1236: sed 's,.*/$,,' stderr 1>&2 -./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -ffixit input.y -input.y:2.8-17: error: token for %prec is not defined: PREC [-Werror=other] -./input.at:2522: -set x `LC_ALL=C ls -l 'three.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='three.y'; } || exit 77 -./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:410: sed 's,.*/$,,' stderr 1>&2 -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error - stderr: -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +64. input.at:2946: testing Deprecated directives ... 
+input.y:6.8-22: error: unset value: $$ [-Werror=other] + 6 | start: end end { $1; } ; + | ^~~~~~~~~~~~~~~ +input.y:6.12-14: error: unused value: $2 [-Werror=other] + 6 | start: end end { $1; } ; + | ^~~ +input.y:7.6-8: error: unset value: $$ [-Werror=other] + 7 | end: { } ; + | ^~~ +input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] +input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] +input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] +input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] +input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] stderr: -./input.at:253: sed 's,.*/$,,' stderr 1>&2 -0+0 records in -0+0 records out -0 bytes copied, 5.2901e-05 s, 0.0 kB/s -0+0 records in -0+0 records out -0 bytes copied, 4.5592e-05 s, 0.0 kB/s - -stdout: -./input.at:1925: sed 's,.*/$,,' stderr 1>&2 -stdout: -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=error -./input.at:2528: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret three.y -./input.at:2607: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-char.y -67. input.at:3148: testing Cannot type action ... 
-./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror stderr: -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=error +./input.at:3019: cp errors-all experr +input.y:2.8-17: error: token for %prec is not defined: PREC [-Werror=other] +stderr: +input.y:1.12-14: error: symbol FOO redeclared [-Werror=other] + 1 | %token FOO FOO + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %token FOO FOO + | ^~~ +input.y:2.15-17: error: symbol BAR redeclared [-Werror=other] + 2 | %token BAR 12 BAR 12 + | ^~~ +input.y:2.8-10: note: previous declaration + 2 | %token BAR 12 BAR 12 + | ^~~ +input.y:3.14-16: error: symbol EOF redeclared [-Werror=other] + 3 | %token EOF 0 EOF 0 + | ^~~ +input.y:3.8-10: note: previous declaration + 3 | %token EOF 0 EOF 0 + | ^~~ input.y:1.1-5: error: POSIX Yacc does not support %code [-Werror=yacc] input.y:9.8-16: error: POSIX Yacc forbids dashes in symbol names: WITH-DASH [-Werror=yacc] input.y:10.21-34: error: POSIX Yacc does not support string literals [-Werror=yacc] @@ -3394,193 +3310,106 @@ input.y:20.8-16: error: POSIX Yacc forbids dashes in symbol names: with-dash [-Werror=yacc] input.y:22.15-28: error: POSIX Yacc does not support string literals [-Werror=yacc] input.y:24.17-32: error: POSIX Yacc does not support string literals [-Werror=yacc] -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -68. input.at:3171: testing Character literals and api.token.raw ... -66. input.at:3113: testing Non-deprecated directives ... -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -ffixit input.y 65. 
input.at:3077: ok -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:1973: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++.y +stderr: +input.yy:2.1-30: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] +input.yy:4.1-30: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:5.1-30: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:3.1-30: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] +45. input.at:2065: ok stderr: -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none input.y:16.13-19: error: useless %printer for type [-Werror=other] input.y:17.16-22: error: useless %destructor for type [-Werror=other] -stderr: -./input.at:3181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -55. input.at:2482: ok -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -69. input.at:3205: testing %token-table and parse.error ... -input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] -input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] -input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] -input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] -input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] -50. 
input.at:2257: ok -./input.at:3220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:1666: sed 's,.*/$,,' stderr 1>&2 -./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y -./input.at:2617: -set x `LC_ALL=C ls -l 'string.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='string.y'; } || exit 77 -./input.at:1085: sed 's,.*/$,,' stderr 1>&2 -68. input.at:3171: ok -./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y -./input.at:2246: sed 's,.*/$,,' stderr 1>&2 -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=error -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none +./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y stderr: -./input.at:3022: sed -e '/^fix-it:/d' errors-all >experr +66. input.at:3113: testing Non-deprecated directives ... +input.y:1.11: error: invalid null character + 1 | %header "ð€ˆ" + | ^ +input.y:2.1-2: error: invalid characters: '\0\001\002\377?' + 2 | ÿ? + | ^~ +input.y:3.2: error: invalid null character + 3 | "" + | ^ +input.y:5.1: error: invalid character: '?' + 5 | ? + | ^ +input.y:6.14: error: invalid character: '}' + 6 | default: 'a' } + | ^ +input.y:7.1: error: invalid character: '%' + 7 | %& + | ^ +input.y:7.2: error: invalid character: '&' + 7 | %& + | ^ +input.y:8.1-17: error: invalid directive: '%a-does-not-exist' + 8 | %a-does-not-exist + | ^~~~~~~~~~~~~~~~~ +input.y:9.1: error: invalid character: '%' + 9 | %- + | ^ +input.y:9.2: error: invalid character: '-' + 9 | %- + | ^ +input.y:10.1-11.0: error: missing '%}' at end of file + 10 | %{ + | ^~ +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:104: "$PERL" -p -e 's{([\0\200\210\360\377])}{sprintf "\\x%02x", ord($1)}ge' stderr +20. input.at:899: ok +67. input.at:3148: testing Cannot type action ... 
+./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:1973: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++.y -0+0 records in -0+0 records out -0 bytes copied, 7.0638e-05 s, 0.0 kB/s -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y -stdout: -70. input.at:3231: testing Invalid file prefix mapping arguments ... -./input.at:3023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:2620: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret string.y -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +68. input.at:3171: testing Character literals and api.token.raw ... +./input.at:3181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y stderr: input.y:2.15: error: stray '$' [-Werror=other] -./input.at:3246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo input.y stderr: -stderr: -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -46. input.at:2102: ok input.y:3.8-10: error: %define variable 'api.value.union.name' redefined [-Werror=other] input.y:1.8-10: note: previous definition input.y:4.1-32: error: %define variable 'api.value.union.name' redefined [-Werror=other] input.y:3.8-10: note: previous definition input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -input.yy:2.1-30: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] -input.yy:4.1-30: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:5.1-30: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:3.1-30: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] -71. named-refs.at:22: testing Tutorial calculator ... - -72. named-refs.at:196: testing Undefined and ambiguous references ... -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Wnone,none -Werror --trace=none -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./named-refs.at:254: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:2727: sed 's,.*/$,,' stderr 1>&2 -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Wnone,none -Werror --trace=none -./named-refs.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./input.at:2630: -set x `LC_ALL=C ls -l 'escape-in-string.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-string.y'; } || exit 77 -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -73. named-refs.at:297: testing Misleading references ... 
-./input.at:2213: sed 's,.*/$,,' stderr 1>&2 - -./input.at:2808: sed 's,.*/$,,' stderr 1>&2 -./input.at:3247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --file-prefix-map foo input.y -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -72. named-refs.at:196: ok -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error -./input.at:2781: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -Wno-deprecated input.y -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -stderr: -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -0+0 records in -0+0 records out -0 bytes copied, 4.9661e-05 s, 0.0 kB/s -stdout: -./input.at:3221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2633: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-string.y +68. input.at:3171: ok +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror ./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -74. named-refs.at:316: testing Many kinds of errors ... 
-./named-refs.at:384: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error -./input.at:3248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo=bar -M baz input.y +./input.at:843: sed 's,.*/$,,' stderr 1>&2 +./input.at:216: sed 's,.*/$,,' stderr 1>&2 +./input.at:410: sed 's,.*/$,,' stderr 1>&2 -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -./input.at:2643: -set x `LC_ALL=C ls -l 'tstring.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='tstring.y'; } || exit 77 -./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y -./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++-glr.y -./input.at:3027: rm -f output.c -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./named-refs.at:426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Wnone,none -Werror --trace=none -./input.at:3249: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo= -M baz input.y -./input.at:3028: cp input.y input.y.orig -stderr: +./input.at:182: sed 's,.*/$,,' stderr 1>&2 stderr: -input.y:11.19: error: stray '$' [-Werror=other] -input.y:11.23: error: stray '@' [-Werror=other] -input.y:12.19: error: stray '$' [-Werror=other] -input.y:12.23: error: stray '@' [-Werror=other] -input.y:13.19: error: stray '$' [-Werror=other] -input.y:13.23: error: stray '@' [-Werror=other] -input.y:16.19: error: stray '$' [-Werror=other] -input.y:16.23: error: stray '@' [-Werror=other] -input.y:17.19: error: stray '$' [-Werror=other] 
-./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=none -Werror --trace=none -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -./input.at:3029: sed -e '/fix-it/d' experr -input.y:10.6-13: error: only midrule actions can be typed: int [-Werror=other] - 10 | exp: {} - | ^~~~~~~~ +bison: invalid argument 'error?all' for '--warning' +Valid arguments are: + - 'all' + - 'conflicts-rr' + - 'conflicts-sr' + - 'counterexamples', 'cex' + - 'dangling-alias' + - 'deprecated' + - 'empty-rule' + - 'everything' + - 'midrule-values' + - 'none' + - 'other' + - 'precedence' + - 'yacc' stderr: -0+0 records in -0+0 records out -0 bytes copied, 4.8721e-05 s, 0.0 kB/s -./input.at:3030: echo "bison: file 'input.y' was updated (backup: 'input.y~')" >>experr -./input.at:3031: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update input.y -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -stdout: -./input.at:2646: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret tstring.y -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:2861: sed 's,.*/$,,' stderr 1>&2 -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -70. input.at:3231: ok -./input.at:3034: diff input.y.orig input.y~ -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none -./input.at:3156: sed 's,.*/$,,' stderr 1>&2 -./input.at:3037: test ! 
-f output.c -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -74. named-refs.at:316: ok -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:2656: -set x `LC_ALL=C ls -l 'escape-in-tstring.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-tstring.y'; } || exit 77 -75. named-refs.at:551: testing Missing identifiers in brackets ... -./named-refs.at:559: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./input.at:3040: sed -e '1,8d' input.y -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -75. named-refs.at:551: 69. input.at:3205: ok - ok - -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -7. input.at:204: ok +bison: option '--skeleton' requires an argument +Try 'bison --help' for more information. +3. input.at:58: ok +./input.at:43: sed -e \ + "s/requires an argument -- skeleton/'--skeleton' requires an argument/" \ + stderr +69. input.at:3205: testing %token-table and parse.error ... +./input.at:3220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: -./input.at:3062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; | ^~~~~~~~~~~~~~~~~~~~~~~ @@ -3686,84 +3515,43 @@ input.y:26.40-42: error: unset value: $$ [-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ -./input.at:2782: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -p bar -Wno-deprecated input.y -stderr: + +70. input.at:3231: testing Invalid file prefix mapping arguments ... 
+./input.at:3246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo input.y +./input.at:1062: sed 's,.*/$,,' stderr 1>&2 +./input.at:2727: sed 's,.*/$,,' stderr 1>&2 +./input.at:253: sed 's,.*/$,,' stderr 1>&2 +./input.at:2246: sed 's,.*/$,,' stderr 1>&2 +./input.at:390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./input.at:1925: sed 's,.*/$,,' stderr 1>&2 +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y +./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:804: sed 's,.*/$,,' stderr 1>&2 +./input.at:1836: sed 's,.*/$,,' stderr 1>&2 +./input.at:2213: sed 's,.*/$,,' stderr 1>&2 +./input.at:1152: sed 's,.*/$,,' stderr 1>&2 +./input.at:2808: sed 's,.*/$,,' stderr 1>&2 +./input.at:1085: sed 's,.*/$,,' stderr 1>&2 +./input.at:1666: sed 's,.*/$,,' stderr 1>&2 +./input.at:1199: sed 's,.*/$,,' stderr 1>&2 +./input.at:2604: +set x `LC_ALL=C ls -l 'escape-in-char.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-char.y'; } || exit 77 stderr: -./input.at:2456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y 0+0 records in 0+0 records out -0 bytes copied, 5.4519e-05 s, 0.0 kB/s -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Werror -stdout: -./input.at:2659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-tstring.y -stderr: -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +0 bytes copied, 4.4543e-05 s, 0.0 kB/s stdout: -input.y:14.1-15.5: error: duplicate directive: '%file-prefix' [-Werror=other] -input.y:13.1-18: note: previous declaration -input.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +4. input.at:83: ok +./input.at:2514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret two.y -./input.at:2055: $PREPARSER ./input -6. 
input.at:173: ok stderr: -./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -26. input.at:1187: ok -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none -./input.at:3133: sed 's,.*/$,,' stderr 1>&2 -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y - -77. named-refs.at:583: testing Comments in brackets ... - -76. named-refs.at:567: testing Redundant words in brackets ... -./named-refs.at:575: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -17. input.at:794: ./input.at:775: sed 's,.*/$,,' stderr 1>&2 - ok - -./named-refs.at:591: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -57. input.at:2582: ok -11. input.at:401: ok - -./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-@@.y -8. input.at:238: ok -./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./named-refs.at:184: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -44. input.at:2025: ./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none - ok -78. named-refs.at:599: testing Stray symbols in brackets ... -18. input.at:832: ok -76. named-refs.at:567: ./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error - ok -42. input.at:1916: ok - -79. named-refs.at:618: testing Redundant words in LHS brackets ... 
-./named-refs.at:625: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -77. named-refs.at:583: ok -./named-refs.at:607: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' test.y || exit 77 -80. named-refs.at:635: testing Factored LHS ... -./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -81. named-refs.at:648: testing Unresolved references ... -./named-refs.at:676: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none -27. input.at:1219: stderr: - - ok -64. input.at:2946: - ok -test.y:11.22-29: error: misleading reference: '$foo.bar' [-Werror=other] -test.y:11.8-10: note: refers to: $foo at $1 -test.y:11.12-18: note: possibly meant: $[foo.bar] at $2 -82. named-refs.at:715: testing $ or @ followed by . or - ... stderr: -./named-refs.at:608: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -23. 
input.at:1045: ok -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./named-refs.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y +./input.at:3022: sed -e '/^fix-it:/d' errors-all >experr +input.y:10.6-13: error: only midrule actions can be typed: int [-Werror=other] + 10 | exp: {} + | ^~~~~~~~ input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; | ^~~~~~~~~~~~~~~~~~~~~~~ @@ -3869,448 +3657,589 @@ input.y:26.40-42: error: unset value: $$ [-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ +stderr: +./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++-glr.y +71. named-refs.at:22: testing Tutorial calculator ... +input.y:11.19: error: stray '$' [-Werror=other] +input.y:11.23: error: stray '@' [-Werror=other] +input.y:12.19: error: stray '$' [-Werror=other] +input.y:12.23: error: stray '@' [-Werror=other] +input.y:13.19: error: stray '$' [-Werror=other] +input.y:13.23: error: stray '@' [-Werror=other] +input.y:16.19: error: stray '$' [-Werror=other] +input.y:16.23: error: stray '@' [-Werror=other] +input.y:17.19: error: stray '$' [-Werror=other] +stderr: +stderr: +./named-refs.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +input.y:14.1-15.5: error: duplicate directive: '%file-prefix' [-Werror=other] +input.y:13.1-18: note: previous declaration +input.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other] +./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:3247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --file-prefix-map foo input.y +./input.at:785: sed 's,.*/$,,' stderr 1>&2 +./input.at:1555: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./input.at:2303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:3221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +2. input.at:27: ok +72. named-refs.at:196: testing Undefined and ambiguous references ... 
+./named-refs.at:254: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y +72. named-refs.at:196: ok +./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y +./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:775: sed 's,.*/$,,' stderr 1>&2 +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +./input.at:1236: sed 's,.*/$,,' stderr 1>&2 +./input.at:2781: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -Wno-deprecated input.y +./input.at:2861: sed 's,.*/$,,' stderr 1>&2 +./input.at:3156: sed 's,.*/$,,' stderr 1>&2 +./input.at:3133: sed 's,.*/$,,' stderr 1>&2 +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +73. named-refs.at:297: testing Misleading references ... +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./input.at:2522: +set x `LC_ALL=C ls -l 'three.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='three.y'; } || exit 77 +stderr: +0+0 records in +0+0 records out +0 bytes copied, 4.6216e-05 s, 0.0 kB/s +stdout: +./input.at:2607: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-char.y +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Werror +69. input.at:3205: ./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-@@.y + ok +50. input.at:2257: ok -79. named-refs.at:618: ok -./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -stderr: -83. output.at:68: testing Output files: -dv ... -./named-refs.at:306: sed 's,.*/$,,' stderr 1>&2 +74. 
named-refs.at:316: testing Many kinds of errors ... +./named-refs.at:384: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +75. named-refs.at:551: testing Missing identifiers in brackets ... +./named-refs.at:559: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./named-refs.at:426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=error +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error +./input.at:1327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export 
VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./named-refs.at:184: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +75. named-refs.at:551: ok +76. named-refs.at:567: testing Redundant words in brackets ... +./named-refs.at:575: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y +76. named-refs.at:567: ok +./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y +77. named-refs.at:583: testing Comments in brackets ... +./named-refs.at:591: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +77. named-refs.at:583: ok +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -87. output.at:87: testing Output files: api.header.include={"./foo.h"} -dv -y ... -81. named-refs.at:648: stdout: - ok -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:785: sed 's,.*/$,,' stderr 1>&2 +78. named-refs.at:599: testing Stray symbols in brackets ... 
+./named-refs.at:607: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' test.y || exit 77 +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=error +./input.at:2456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:3023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:3248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo=bar -M baz input.y +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y stderr: -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none -./input.at:391: $PREPARSER ./input -60. input.at:2764: ok -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret test.y +0+0 records in +0+0 records out +0 bytes copied, 8.7667e-05 s, 0.0 kB/s stdout: -./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y - +./input.at:2528: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret three.y stderr: -85. output.at:81: testing Output files: -dv -o foo.c ... -./output.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv foo.y +test.y:11.22-29: error: misleading reference: '$foo.bar' [-Werror=other] +test.y:11.8-10: note: refers to: $foo at $1 +test.y:11.12-18: note: possibly meant: $[foo.bar] at $2 +74. 
named-refs.at:316: ./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y + ok +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2617: +set x `LC_ALL=C ls -l 'string.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='string.y'; } || exit 77 +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=error +./named-refs.at:608: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none stderr: + +stdout: 
+./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-].y +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error +./input.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./named-refs.at:306: sed 's,.*/$,,' stderr 1>&2 +./input.at:3249: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo= -M baz input.y ./input.at:1556: $CC $CFLAGS $CPPFLAGS -c -o main.o main.c -78. named-refs.at:599: ok -84. output.at:74: testing Output files: -dv >&- ... -./output.at:74: case "$PREBISON" in *valgrind*) exit 77;; esac -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -input.y:6.23-28: error: unused value: $4 [-Werror=other] -input.y:8.9-11: error: unset value: $$ [-Werror=other] +55. input.at:2482: ok +79. named-refs.at:618: testing Redundant words in LHS brackets ... +./named-refs.at:625: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +stderr: +0+0 records in +0+0 records out +0 bytes copied, 4.8592e-05 s, 0.0 kB/s +stdout: +stderr: +./input.at:2620: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret string.y +stdout: +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2055: $PREPARSER ./input +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none +79. named-refs.at:618: ok -89. output.at:95: testing Output files: --fixed-output-files -dv -g --html ... -88. output.at:92: testing Output files: -dv -o foo.tab.c ... 
-./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=error +78. named-refs.at:599: ok +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +stderr: +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Wnone,none -Werror --trace=none +stdout: +46. input.at:2102: ok +./input.at:391: $PREPARSER ./input +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none +80. named-refs.at:635: testing Factored LHS ... +./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Wnone,none -Werror --trace=none -90. output.at:97: testing Output files: -Hfoo.header -v -gfoo.gv --html=foo.html ... -86. output.at:84: testing Output files: -dv -y ... -./output.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.c foo.y -./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.tab.c foo.y -90. output.at:97: 10. input.at:341: ok -89. output.at:95: 91. output.at:100: testing Output files: -dv -g --xml --fixed-output-files ... 
-./output.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y -./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none - skipped (output.at:95) - skipped (output.at:97) -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv >&- foo.y -./input.at:1175: sed 's,.*/$,,' stderr 1>&2 - -92. output.at:102: testing Output files: -dv -g --xml -y ... - +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -94. output.at:107: testing Output files: -dv -g --xml -o y.tab.c ... -./output.at:107: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -o y.tab.c foo.y -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y -93. output.at:104: testing Output files: %require "3.4" -dv -g --xml -y ... 
-./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y -./output.at:102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -W input.y +./input.at:2782: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -p bar -Wno-deprecated input.y +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +stderr: +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y +stderr: +./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +82. named-refs.at:715: testing $ or @ followed by . or - ... +./named-refs.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y +./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +70. input.at:3231: ok +81. named-refs.at:648: testing Unresolved references ... +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./named-refs.at:676: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +83. output.at:68: testing Output files: -dv ... 
+./output.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv foo.y +./input.at:2630: +set x `LC_ALL=C ls -l 'escape-in-string.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-string.y'; } || exit 77 -80. named-refs.at:635: ./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-].y -./output.at:104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y +81. named-refs.at:648: ok +80. named-refs.at:635: ok +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=error +./input.at:1359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret test.y +./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y +44. input.at:2025: ./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none ok +6. input.at:173: ok +./input.at:3027: rm -f output.c +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +84. output.at:74: testing Output files: -dv >&- ... 
+./output.at:74: case "$PREBISON" in *valgrind*) exit 77;; esac +stderr: +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none +0+0 records in +0+0 records out +0 bytes copied, 4.7427e-05 s, 0.0 kB/s +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none +stdout: +./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv >&- foo.y -97. output.at:116: testing Output files: %header %verbose ... -./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -95. output.at:110: testing Output files: -dv -b bar ... -98. output.at:118: testing Output files: %header %verbose %yacc ... -./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +./input.at:2633: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-string.y stderr: -foo.y:1.1-7: warning: POSIX Yacc does not support %define [-Wyacc] -./output.at:87: find . -type f | +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Wnone,none -Werror --trace=none +43. input.at:1936: ok +85. output.at:81: testing Output files: -dv -o foo.c ... +./output.at:68: find . -type f | "$PERL" -ne ' s,\./,,; chomp; +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -96. 
output.at:112: testing Output files: -dv -g -o foo.c ... -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./output.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.c foo.y + +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none + +86. output.at:84: testing Output files: -dv -y ... +./output.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y + +87. output.at:87: testing Output files: api.header.include={"./foo.h"} -dv -y ... +88. output.at:92: testing Output files: -dv -o foo.tab.c ... +./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y +./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.tab.c foo.y +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +89. output.at:95: testing Output files: --fixed-output-files -dv -g --html ... +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +60. input.at:2764: ok ./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=none input.y -./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -99. output.at:121: testing Output files: %header %verbose %yacc ... -./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -b bar foo.y -./output.at:121: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -./output.at:118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g -o foo.c foo.y -stderr: -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose ... -101. output.at:127: testing Output files: %output "bar.c" %header %verbose %yacc ... 
+./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none -./output.at:68: find . -type f | +stderr: +foo.y:1.1-7: warning: POSIX Yacc does not support %define [-Wyacc] +./output.at:87: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none stderr: -./output.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -./output.at:92: find . -type f | +./output.at:81: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:87: grep '#include "./foo.h"' y.tab.c -./output.at:127: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -stderr: -stderr: +10. input.at:341: ok +90. output.at:97: testing Output files: -Hfoo.header -v -gfoo.gv --html=foo.html ... +./input.at:3028: cp input.y input.y.orig +42. input.at:1916: ok stderr: -:6: warning: deprecated option: '--fixed-output-files', use '-o y.tab.c' [-Wdeprecated] +26. input.at:1187: ok ./output.at:84: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -input.y:1.16-18: error: duplicate directive [-Werror=other] - 1 | %start exp foo exp - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %start exp foo exp - | ^~~ -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none -./output.at:100: find . -type f | + +stderr: +./input.at:2643: +set x `LC_ALL=C ls -l 'tstring.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='tstring.y'; } || exit 77 +83. output.at:68: ok +./output.at:92: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Werror -stdout: -#include "./foo.h" -53. input.at:2393: ok -83. output.at:68: stderr: - ok +stderr: +91. output.at:100: testing Output files: -dv -g --xml --fixed-output-files ... + ./output.at:74: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none +./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y + +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y +89. output.at:95: ./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +90. output.at:97: ./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Werror +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none +stderr: ./output.at:84: grep '#include "y.tab.h"' y.tab.c -87. output.at:87: ok stdout: -./input.at:1859: sed 's,.*/$,,' stderr 1>&2 -102. output.at:129: testing Output files: %file-prefix "baz" %output "bar.c" %header %verbose %yacc ... -stderr: -88. output.at:92: ok +17. input.at:794: 23. input.at:1045: 67. input.at:3148: ok + ok +./input.at:3029: sed -e '/fix-it/d' experr + ok + skipped (output.at:95) + skipped (output.at:97) + +92. output.at:102: testing Output files: -dv -g --xml -y ... +18. input.at:832: 7. input.at:204: ok + ok +./output.at:102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y stderr: -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y -./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +./output.at:81: grep '#include "foo.h"' foo.c +0+0 records in +0+0 records out +0 bytes copied, 4.3258e-05 s, 0.0 kB/s stdout: -86. output.at:84: 91. output.at:100: ok + +84. output.at:74: ok -./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy -84. output.at:74: ok +./output.at:87: grep '#include "./foo.h"' y.tab.c +./input.at:2646: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret tstring.y +93. 
output.at:104: testing Output files: %require "3.4" -dv -g --xml -y ... +94. output.at:107: testing Output files: -dv -g --xml -o y.tab.c ... + +./output.at:104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y +88. output.at:92: ok +./output.at:107: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -o y.tab.c foo.y + +96. output.at:112: testing Output files: -dv -g -o foo.c ... +97. output.at:116: testing Output files: %header %verbose ... +95. output.at:110: testing Output files: -dv -b bar ... +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=none -Werror --trace=none + +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y +./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y + + +./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g -o foo.c foo.y +./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -b bar foo.y +98. output.at:118: testing Output files: %header %verbose %yacc ... +./output.at:118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +99. output.at:121: testing Output files: %header %verbose %yacc ... +100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose ... + +./output.at:121: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +101. output.at:127: testing Output files: %output "bar.c" %header %verbose %yacc ... +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./output.at:127: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y stderr: -./output.at:81: find . -type f | +:6: warning: deprecated option: '--fixed-output-files', use '-o y.tab.c' [-Wdeprecated] +./output.at:100: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +11. input.at:401: 103. output.at:136: testing Output files: %header %verbose ... stderr: -foo.y:1.1-8: warning: POSIX Yacc does not support %require [-Wyacc] -foo.y:1.10-14: warning: POSIX Yacc does not support string literals [-Wyacc] -./input.at:1557: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.o main.o $LIBS -67. input.at:3148: ok -stderr: -./output.at:125: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; -./output.at:104: find . -type f | + ok +./output.at:102: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - push @file, $_ unless m{^(foo.y|testsuite.log)$}; -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y +102. output.at:129: testing Output files: %file-prefix "baz" %output "bar.c" %header %verbose %yacc ... +29. input.at:1275: ok +./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y + stderr: stderr: - END { print join (" ", sort @file), "\n" }' || exit 77 - -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./output.at:102: find . -type f | +stderr: +foo.y:1.1-8: warning: POSIX Yacc does not support %require [-Wyacc] +foo.y:1.10-14: warning: POSIX Yacc does not support string literals [-Wyacc] +./output.at:104: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:116: find . -type f | +./output.at:112: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 + ./output.at:110: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -43. input.at:1936: ok -66. input.at:3113: ok stderr: -./input.at:1681: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./output.at:107: find . -type f | +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./output.at:116: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none - +104. output.at:139: testing Output files: %header %verbose -o foo.c ... +stderr: +./output.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c foo.yy 105. output.at:142: testing Output files: --header=foo.hpp -o foo.c++ ... +test.y:4.9: error: stray '$' [-Werror=other] +test.y:5.9: error: stray '@' [-Werror=other] stderr: - - -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -93. output.at:104: ok ./output.at:142: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy -./output.at:81: grep '#include "foo.h"' foo.c stderr: -97. output.at:116: ok -./output.at:118: find . -type f | +stderr: +./output.at:107: find . 
-type f | + "$PERL" -ne ' + s,\./,,; chomp; +./output.at:127: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:112: find . -type f | + push @file, $_ unless m{^(foo.y|testsuite.log)$}; +./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1557: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.o main.o $LIBS +./output.at:125: find . -type f | "$PERL" -ne ' s,\./,,; chomp; +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y +stderr: + END { print join (" ", sort @file), "\n" }' || exit 77 push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -94. output.at:107: ok -95. output.at:110: ok - -100. output.at:125: 103. output.at:136: testing Output files: %header %verbose ... -./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy - -104. output.at:139: testing Output files: %header %verbose -o foo.c ... - ok -./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -stdout: -92. output.at:102: #include "foo.h" -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Werror -stderr: - ok -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y - ./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=none -Werror --trace=none -./output.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c foo.yy -test.y:4.9: error: stray '$' [-Werror=other] -test.y:5.9: error: stray '@' [-Werror=other] -106. output.at:146: testing Output files: --header=foo.hpp -o foo.c++ ... - -62. input.at:2840: ok -./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy -85. 
output.at:81: ok -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -108. output.at:154: testing Output files: -o foo.c++ --graph=foo.gph ... -98. output.at:118: ok -stderr: -107. output.at:150: testing Output files: %header "foo.hpp" -o foo.c++ ... - -96. output.at:112: ok -./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy -109. output.at:160: testing Output files: %type useless --header --graph --xml --report=all -Wall -Werror ... -./output.at:127: find . -type f | +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -W input.y +stdout: +./output.at:118: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -111. output.at:173: testing Output files: %defines -o foo.c++ ... - -stderr: -112. output.at:176: testing Output files: %defines "foo.hpp" -o foo.c++ ... -110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ... -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y -./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy - +stdout: +66. input.at:3113: ok +./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +#include "foo.h" stderr: -./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2 +86. output.at:84: ok ./output.at:121: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - - -./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y +8. input.at:238: ok +85. output.at:81: ok +./input.at:2656: +set x `LC_ALL=C ls -l 'escape-in-tstring.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-tstring.y'; } || exit 77 stderr: - -stdout: -./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy -./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y -./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:1558: $PREPARSER ./input ./output.at:129: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -113. output.at:191: testing Output files: lalr1.cc ... 
- -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none stderr: -./output.at:176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy -101. output.at:127: ok -stderr: -./output.at:142: find . -type f | +./output.at:136: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -99. output.at:121: ./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=error - ok -115. output.at:197: testing Output files: lalr1.cc %header %verbose ... -./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -116. output.at:200: testing Output files: lalr1.cc %verbose %locations ... -114. output.at:194: testing Output files: lalr1.cc %verbose ... -./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -102. output.at:129: 117. output.at:203: testing Output files: lalr1.cc %header %verbose %locations ... - ok -./output.at:197: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -./output.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -30. input.at:1400: 118. output.at:206: testing Output files: lalr1.cc %header %verbose ... - ok -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./output.at:203: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy stderr: -120. output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo" ... -input.y:2.15: error: stray '$' [-Werror=other] -119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ... -122. output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2" ... -105. output.at:142: ok -./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -121. output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo" ... 
-./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror -stderr: stderr: -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret subdir/foo.yy -./output.at:150: find . -type f | +./output.at:139: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:154: find . -type f | +106. output.at:146: testing Output files: --header=foo.hpp -o foo.c++ ... +./output.at:142: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -61. input.at:2793: ok -./input.at:2730: sed 's,.*/$,,' stderr 1>&2 -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y -stderr: +107. output.at:150: testing Output files: %header "foo.hpp" -o foo.c++ ... +108. output.at:154: testing Output files: -o foo.c++ --graph=foo.gph ... +./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +109. output.at:160: testing Output files: %type useless --header --graph --xml --report=all -Wall -Werror ... +./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy +./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy +./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y +stdout: +#include "./foo.h" +87. output.at:87: ok +92. output.at:102: ok +94. output.at:107: ok +95. output.at:110: 97. output.at:116: 93. output.at:104: ./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror + ok +91. output.at:100: ok + ok + ok +96. output.at:112: ok stderr: - -./output.at:139: find . -type f | +./output.at:146: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=none input.y -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror - - -./output.at:136: find . -type f | +stderr: +stderr: +./output.at:150: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -123. output.at:231: testing Output files: lalr1.cc %header %locations api.location.file="foo.loc.hh" %require "3.2" ... -stderr: -./output.at:173: find . -type f | + +./output.at:154: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=error - -./output.at:231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy stderr: -./output.at:146: find . -type f | +foo.y:1.13-19: error: symbol 'useless' is used, but is not defined as a token and has no rules [-Werror=other] +foo.y: error: 1 nonterminal useless in grammar [-Werror=other] +foo.y:1.13-19: error: nonterminal useless in grammar: useless [-Werror=other] + +./output.at:160: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -stderr: -103. output.at:136: -107. output.at:150: ok -input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] -input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] -input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] -input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] -input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] -124. output.at:237: testing Output files: lalr1.cc %header %locations api.location.file="$at_dir/foo.loc.hh" %require "3.2" ... -104. output.at:139: ok + + + + + + +110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ... +./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y +111. output.at:173: testing Output files: %defines -o foo.c++ ... +113. output.at:191: testing Output files: lalr1.cc ... +112. output.at:176: testing Output files: %defines "foo.hpp" -o foo.c++ ... +114. output.at:194: testing Output files: lalr1.cc %verbose ... 
+./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy +./output.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +117. output.at:203: testing Output files: lalr1.cc %header %verbose %locations ... +115. output.at:197: testing Output files: lalr1.cc %header %verbose ... +116. output.at:200: testing Output files: lalr1.cc %verbose %locations ... +./output.at:176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy +./output.at:197: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:203: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy stderr: stderr: - ok -108. output.at:154: ok -./output.at:200: find . -type f | +./output.at:194: find . -type f | "$PERL" -ne ' s,\./,,; chomp; -73. named-refs.at:297: push @file, $_ unless m{^(foo.yy|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 - ok -./output.at:191: find . -type f | +./output.at:173: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -stderr: -./output.at:176: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:237: "$PERL" -pi -e 's{\$at_dir}'"{$at_group_dir}g" foo.yy || exit 77 -126. output.at:272: testing Conflicting output files: %header "foo.output" -v ... stderr: -./input.at:2465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: -foo.y:1.13-19: error: symbol 'useless' is used, but is not defined as a token and has no rules [-Werror=other] -foo.y: error: 1 nonterminal useless in grammar [-Werror=other] -foo.y:1.13-19: error: nonterminal useless in grammar: useless [-Werror=other] stderr: foo.y:1.1-15: error: %define variable 'useless' is not used ./output.at:167: find . -type f | @@ -4318,285 +4247,375 @@ s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -127. output.at:277: testing Conflicting output files: lalr1.cc %header %locations --graph="location.hh" ... -111. output.at:173: ok -./output.at:237: rm -f foo.yy.bak -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v foo.y - -125. output.at:267: testing Conflicting output files: --graph="foo.tab.c" ... -./input.at:2247: sed 's,.*/$,,' stderr 1>&2 -stderr: -./output.at:206: find . -type f | +./output.at:197: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./output.at:194: find . -type f | +./output.at:176: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -./output.at:160: find . -type f | +stderr: +stderr: +./output.at:203: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.y|testsuite.log)$}; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="location.hh" foo.y -stderr: -128. output.at:282: testing Conflicting output files: -o foo.y ... -106. output.at:146: ok -113. output.at:191: ok stderr: -112. output.at:176: ok -./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy - -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -./output.at:226: find . -type f | +./output.at:200: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none -./output.at:197: find . -type f | +./output.at:191: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o foo.y foo.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full input.y -110. output.at:167: ok stderr: -130. output.at:335: testing Output file name: ( ... -./output.at:335: touch "(.tmp" || exit 77 stdout: -116. 
output.at:200: ok -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -131. output.at:336: testing Output file name: ) ... -./output.at:336: touch ").tmp" || exit 77 -129. output.at:328: testing Output file name: `~!@#$%^&*()-=_+{}[]|\:;<>, .' ... -./output.at:328: touch "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.tmp" || exit 77 -./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y - -114. output.at:194: ok -122. output.at:226: ok -./output.at:206: grep 'include .subdir/' foo.tab.cc +./named-refs.at:185: $PREPARSER ./test input.txt +./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2 +./input.at:3030: echo "bison: file 'input.y' was updated (backup: 'input.y~')" >>experr +62. input.at:2840: ok stderr: -./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.c" --header="(.h" glr.y +102. output.at:129: 98. output.at:118: ok +0+0 records in +0+0 records out +0 bytes copied, 3.6365e-05 s, 0.0 kB/s + ok +stdout: +101. output.at:127: ok +100. output.at:125: 99. output.at:121: ok + ok +103. output.at:136: ok +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +104. output.at:139: ./input.at:2659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-tstring.y + ok + + + + + + + + +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +118. output.at:206: testing Output files: lalr1.cc %header %verbose ... +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy +119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ... +121. output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo" ... stderr: -./output.at:203: find . 
-type f | - "$PERL" -ne ' - s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 -109. output.at:160: ok -input.y:3.13-14: error: useless %printer for type <> [-Werror=other] -./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -132. output.at:337: testing Output file name: # ... -./output.at:337: touch "#.tmp" || exit 77 +109. output.at:160: 53. input.at:2393: stdout: +122. output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2" ... +120. output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo" ... + ok + ok +123. output.at:231: testing Output files: lalr1.cc %header %locations api.location.file="foo.loc.hh" %require "3.2" ... +./input.at:1558: $PREPARSER ./input +124. output.at:237: testing Output files: lalr1.cc %header %locations api.location.file="$at_dir/foo.loc.hh" %require "3.2" ... +108. output.at:154: ok +110. output.at:167: 117. output.at:203: ok + ok +105. output.at:142: 125. output.at:267: testing Conflicting output files: --graph="foo.tab.c" ... +./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret subdir/foo.yy + ok 115. output.at:197: ok +./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +112. output.at:176: 107. output.at:150: ./output.at:231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy + ok + ok +./output.at:237: "$PERL" -pi -e 's{\$at_dir}'"{$at_group_dir}g" foo.yy || exit 77 +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y +114. output.at:194: 111. output.at:173: ok + ok +106. output.at:146: 116. output.at:200: ok + ok +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +113. output.at:191: ok -./output.at:206: grep 'include .subdir/' foo.tab.hh -./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").c" --header=").h" glr.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Wnone,none -Werror --trace=none -./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy -133. output.at:338: testing Output file name: @@ ... 
-./output.at:338: touch "@@.tmp" || exit 77 -./output.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy -stderr: -./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.c" --header="#.h" glr.y -./input.at:1116: sed 's,.*/$,,' stderr 1>&2 -input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] -input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] + + + + + + + + + +126. output.at:272: testing Conflicting output files: %header "foo.output" -v ... +127. output.at:277: testing Conflicting output files: lalr1.cc %header %locations --graph="location.hh" ... +128. output.at:282: testing Conflicting output files: -o foo.y ... +129. output.at:328: testing Output file name: `~!@#$%^&*()-=_+{}[]|\:;<>, .' ... +./output.at:328: touch "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.tmp" || exit 77 +130. output.at:335: testing Output file name: ( ... +./output.at:335: touch "(.tmp" || exit 77 +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v foo.y +131. output.at:336: testing Output file name: ) ... +./output.at:336: touch ").tmp" || exit 77 +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="location.hh" foo.y +stderr: +./output.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o foo.y foo.y 134. output.at:339: testing Output file name: @{ ... ./output.at:339: touch "@{.tmp" || exit 77 -stderr: +132. output.at:337: testing Output file name: # ... +./output.at:337: touch "#.tmp" || exit 77 +input.y:1.16-18: error: duplicate directive [-Werror=other] + 1 | %start exp foo exp + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %start exp foo exp + | ^~~ +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +133. output.at:338: testing Output file name: @@ ... +./output.at:338: touch "@@.tmp" || exit 77 +./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y +./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.c" --header="(.h" glr.y +137. output.at:342: testing Output file name: ] ... +./output.at:342: touch "].tmp" || exit 77 +138. output.at:363: testing Graph with no conflicts ... +136. output.at:341: testing Output file name: [ ... +./output.at:341: touch "[.tmp" || exit 77 135. output.at:340: testing Output file name: @} ... ./output.at:340: touch "@}.tmp" || exit 77 -36. 
input.at:1642: stderr: - ok -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y +./output.at:363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +139. output.at:403: testing Graph with unsolved S/R ... +./output.at:403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").c" --header=").h" glr.y +./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.c" --header="@{.h" glr.y +./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.c" --header="#.h" glr.y ./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.c" --header="@@.h" glr.y - -25. input.at:1139: ./output.at:215: find . -type f | +./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy +./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.c" --header="@}.h" glr.y +./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.c" --header="[.h" glr.y +stderr: +./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].c" --header="].h" glr.y +stderr: +./output.at:231: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -117. output.at:203: ./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Werror - ok -./output.at:231: find . -type f | +stderr: +./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy +./output.at:226: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -118. output.at:206: - - ok - ok -136. output.at:341: testing Output file name: [ ... -./output.at:341: touch "[.tmp" || exit 77 -138. output.at:363: testing Graph with no conflicts ... -137. output.at:342: testing Output file name: ] ... 
-./output.at:342: touch "].tmp" || exit 77 -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.c" --header="@}.h" glr.y -./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.c" --header="@{.h" glr.y -./output.at:363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y - -./input.at:2214: sed 's,.*/$,,' stderr 1>&2 -120. output.at:215: ./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none -stderr: -./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - -./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.c" --header="[.h" glr.y -./output.at:335: ls "(.c" "(.h" -139. output.at:403: testing Graph with unsolved S/R ... -./output.at:403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./output.at:206: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 ./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Werror -123. output.at:231: ./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].c" --header="].h" glr.y - ok +./output.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy +stderr: +input.y: warning: 3 shift/reduce conflicts [-Wconflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:10.10-18: warning: rule useless in parser due to conflicts [-Wother] +input.y:11.10-18: warning: rule useless in parser due to conflicts [-Wother] +input.y:12.10-18: warning: rule useless in parser due to conflicts [-Wother] +./output.at:403: grep -v // input.gv +./input.at:3031: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update input.y +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Werror +./output.at:237: rm -f foo.yy.bak ./output.at:282: cat foo.y -141. output.at:538: testing Graph with R/R ... 
+stderr: +./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./input.at:1681: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +stderr: +73. named-refs.at:297: ok +./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Werror - ok -stdout: -./output.at:237: find . -type f | +stderr: +stderr: +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Werror +./output.at:363: grep -v // input.gv +./output.at:215: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 +27. input.at:1219: ok +57. input.at:2582: ok +stderr: +./output.at:210: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -(.c -(.h -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error - -./output.at:538: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -./output.at:335: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "(.c" -140. output.at:473: testing Graph with solved S/R ... -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error -142. output.at:576: testing Graph with reductions with multiple LAT ... -./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" -./output.at:576: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y - -./output.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -128. output.at:282: ok -stdout: - -143. output.at:641: testing Graph with a reduction rule both enabled and disabled ... -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.c -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.h -./output.at:328: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" -./output.at:641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y stderr: - -124. output.at:237: 145. diagnostics.at:84: testing Warnings ... ./output.at:220: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - ok -stderr: -146. diagnostics.at:133: testing Single point locations ... 
-foo.y: error: conflicting outputs to file 'foo.output' [-Werror=other] + + ./output.at:336: ls ").c" ").h" +140. output.at:473: testing Graph with solved S/R ... +./output.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +141. output.at:538: testing Graph with R/R ... +./output.at:339: ls "@{.c" "@{.h" +stderr: +./output.at:538: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +stderr: +input.y:6.23-28: error: unused value: $4 [-Werror=other] +input.y:8.9-11: error: unset value: $$ [-Werror=other] +./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" +input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] +input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] +input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] +input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] +input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] +./output.at:335: ls "(.c" "(.h" +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=none input.y + +61. input.at:2793: ok + +142. output.at:576: testing Graph with reductions with multiple LAT ... +./output.at:576: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=error +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y +143. output.at:641: testing Graph with a reduction rule both enabled and disabled ... +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y +./input.at:1859: sed 's,.*/$,,' stderr 1>&2 +stderr: +input.y:6.5-7: warning: rule useless in parser due to conflicts [-Wother] +input.y:14.10-18: warning: rule useless in parser due to conflicts [-Wother] +input.y:15.10-18: warning: rule useless in parser due to conflicts [-Wother] stderr: +./output.at:473: grep -v // input.gv +./output.at:641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./output.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:4.3: warning: rule useless in parser due to conflicts [-Wother] +./output.at:538: grep -v // input.gv +30. input.at:1400: 139. output.at:403: ok + ok +./input.at:3034: diff input.y.orig input.y~ +./output.at:206: grep 'include .subdir/' foo.tab.cc +138. output.at:363: ./output.at:210: grep 'include .subdir/' subdir/foo.cc + ok +120. output.at:215: ok +128. output.at:282: ok +122. 
output.at:226: ok +123. output.at:231: ok + + + + + +stderr: +stderr: +./output.at:341: ls "[.c" "[.h" +input.y: warning: 3 reduce/reduce conflicts [-Wconflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:2.14-18: warning: rule useless in parser due to conflicts [-Wother] +input.y:5.3: warning: rule useless in parser due to conflicts [-Wother] +145. diagnostics.at:84: testing Warnings ... +146. diagnostics.at:133: testing Single point locations ... 144. output.at:744: testing C++ Output File Prefix Mapping ... -./output.at:338: ls "@@.c" "@@.h" -./output.at:337: ls "#.c" "#.h" 147. diagnostics.at:182: testing Line is too short, and then you die ... -input.y: warning: 3 shift/reduce conflicts [-Wconflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:10.10-18: warning: rule useless in parser due to conflicts [-Wother] -input.y:11.10-18: warning: rule useless in parser due to conflicts [-Wother] -input.y:12.10-18: warning: rule useless in parser due to conflicts [-Wother] +./output.at:576: grep -v // input.gv +input.y:2.15: error: stray '$' [-Werror=other] +./output.at:337: ls "#.c" "#.h" +149. diagnostics.at:235: testing Tabulations and multibyte characters ... +./output.at:342: ls "].c" "].h" +148. diagnostics.at:217: testing Zero-width characters ... +./output.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x1.cc -M out/=bar/ x1.yy +./output.at:338: ls "@@.c" "@@.h" +./output.at:340: ls "@}.c" "@}.h" +150. diagnostics.at:282: testing Tabulations and multibyte characters ... +./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? 
chr($v) : $v}ge' input.y experr || exit 77 +./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./input.at:2465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2247: sed 's,.*/$,,' stderr 1>&2 +./input.at:1175: sed 's,.*/$,,' stderr 1>&2 +stdout: +stdout: stdout: -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none stderr: +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.c +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.h ).c ).h -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=none -Werror --trace=none -./output.at:403: grep -v // input.gv -./output.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x1.cc -M out/=bar/ x1.yy -121. output.at:220: ./output.at:272: sed 's,.*/$,,' stderr 1>&2 - ok -./output.at:342: ls "].c" "].h" +(.c +(.h +71. named-refs.at:22: ok stdout: -./output.at:336: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c ").c" -148. diagnostics.at:217: testing Zero-width characters ... +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +@{.c +@{.h +121. output.at:220: ok + + +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full input.y stdout: stdout: +./output.at:206: grep 'include .subdir/' foo.tab.hh +151. diagnostics.at:303: testing Special files ... #.c #.h -./output.at:337: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "#.c" -./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -@@.c -@@.h -150. diagnostics.at:282: testing Tabulations and multibyte characters ... +[.c +[.h +152. diagnostics.at:328: testing Complaints from M4 ... +142. output.at:576: 141. output.at:538: ok + ok +140. 
output.at:473: ok +./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y + + + +./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y stderr: -./output.at:338: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@@.c" stderr: -149. diagnostics.at:235: testing Tabulations and multibyte characters ... foo.y: error: conflicting outputs to file 'foo.tab.c' [-Werror=other] - +foo.y: error: conflicting outputs to file 'foo.output' [-Werror=other] +153. diagnostics.at:351: testing Carriage return ... +155. diagnostics.at:399: testing Screen width: 200 columns ... +154. diagnostics.at:372: testing CR NL ... +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none +./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 +./diagnostics.at:372: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 +./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret --color=debug -Wall,cex input.y +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./output.at:328: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" +./input.at:2730: sed 's,.*/$,,' stderr 1>&2 +./output.at:339: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@{.c" +./output.at:336: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c ").c" +stdout: +./output.at:210: grep 'include .subdir/' subdir/foo.hh +./input.at:3037: test ! -f output.c +./output.at:335: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "(.c" +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y +./output.at:775: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc +stdout: stdout: -stderr: -./output.at:210: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; -./named-refs.at:185: $PREPARSER ./test input.txt ].c ].h -./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y - push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 -stderr: -82. named-refs.at:715: ok -./output.at:363: grep -v // input.gv -./output.at:342: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "].c" -139. 
output.at:403: ./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y - ok -./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y -stderr: -./output.at:340: ls "@}.c" "@}.h" -./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=error -stdout: -./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 - -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +@@.c +@@.h @}.c @}.h -138. output.at:363: ok -foo.y: error: conflicting outputs to file 'location.hh' [-Werror=other] -./output.at:340: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@}.c" -stderr: -input.y: warning: 3 reduce/reduce conflicts [-Wconflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:2.14-18: warning: rule useless in parser due to conflicts [-Wother] -input.y:5.3: warning: rule useless in parser due to conflicts [-Wother] -./output.at:576: grep -v // input.gv -./output.at:267: sed 's,.*/$,,' stderr 1>&2 -151. diagnostics.at:303: testing Special files ... -./output.at:341: ls "[.c" "[.h" -stderr: ./diagnostics.at:182: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -4605,25 +4624,7 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./output.at:210: grep 'include .subdir/' subdir/foo.cc -./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y - -stderr: -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:4.3: warning: rule useless in parser due to conflicts [-Wother] -stdout: -[.c -[.h -71. named-refs.at:22: ok -./output.at:339: ls "@{.c" "@{.h" -./output.at:538: grep -v // input.gv -./output.at:277: sed 's,.*/$,,' stderr 1>&2 -./output.at:210: grep 'include .subdir/' subdir/foo.hh -./output.at:341: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "[.c" -142. output.at:576: 152. 
diagnostics.at:328: testing Complaints from M4 ... - ok +./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:84: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -4632,21 +4633,10 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./output.at:775: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc -./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y -141. output.at:538: -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -stdout: - ok -119. output.at:210: ok -./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -@{.c -@{.h -./diagnostics.at:217: "$PERL" -pi -e ' +stderr: +stderr: +input.y:3.13-14: error: useless %printer for type <> [-Werror=other] +./diagnostics.at:133: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4654,31 +4644,21 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none -153. diagnostics.at:351: testing Carriage return ... 
- - -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-none input.y -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error -./output.at:339: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@{.c" -./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y stderr: -154. diagnostics.at:372: testing CR NL ... -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none -input.y:6.5-7: warning: rule useless in parser due to conflicts [-Wother] -input.y:14.10-18: warning: rule useless in parser due to conflicts [-Wother] -input.y:15.10-18: warning: rule useless in parser due to conflicts [-Wother] -./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./output.at:473: grep -v // input.gv -./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y -147. diagnostics.at:182: ok - - -148. diagnostics.at:217: ok -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -140. output.at:473: ok -./diagnostics.at:133: "$PERL" -pi -e ' +input.y: warning: 4 shift/reduce conflicts [-Wconflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./output.at:641: grep -v // input.gv +./output.at:237: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 +stderr: +input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] +input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] +./diagnostics.at:235: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4686,13 +4666,26 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -54. input.at:2429: ok -156. 
diagnostics.at:432: testing Screen width: 80 columns ... -155. diagnostics.at:399: testing Screen width: 200 columns ... -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y +stderr: +foo.y: error: conflicting outputs to file 'location.hh' [-Werror=other] +./output.at:272: sed 's,.*/$,,' stderr 1>&2 +./input.at:3040: sed -e '1,8d' input.y +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y +./output.at:337: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "#.c" +./output.at:267: sed 's,.*/$,,' stderr 1>&2 +./output.at:341: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "[.c" +118. output.at:206: 119. output.at:210: ok + ok +stderr: +stdout: -./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 -./diagnostics.at:282: "$PERL" -pi -e ' + +156. diagnostics.at:432: testing Screen width: 80 columns ... +157. diagnostics.at:465: testing Screen width: 60 columns ... +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:303: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4700,26 +4693,7 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -157. diagnostics.at:465: testing Screen width: 60 columns ... -./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -stderr: -158. diagnostics.at:504: testing Suggestions ... -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./diagnostics.at:372: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 -./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -input.y:2.8-10: error: duplicate directive [-Werror=other] - 2 | %start exp - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %start exp foo - | ^~~ -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] - - -159. diagnostics.at:527: testing Counterexamples ... 
-./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Wnone,none -Werror --trace=none -./diagnostics.at:303: "$PERL" -pi -e ' +./diagnostics.at:399: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4727,12 +4701,8 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 - -./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y -145. diagnostics.at:84: ok - -./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:351: "$PERL" -pi -e ' +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none +./diagnostics.at:328: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4740,47 +4710,59 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret --color=debug -Wall,cex input.y -./input.at:1877: sed 's,.*/$,,' stderr 1>&2 -./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y -./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none -./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; 
VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:235: "$PERL" -pi -e ' +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./output.at:342: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "].c" +./input.at:2214: sed 's,.*/$,,' stderr 1>&2 +./output.at:277: sed 's,.*/$,,' stderr 1>&2 +./input.at:1116: sed 's,.*/$,,' stderr 1>&2 +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y +./output.at:340: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@}.c" +./output.at:338: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@@.c" +143. output.at:641: ok +124. output.at:237: ok +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror + + +158. diagnostics.at:504: testing Suggestions ... +159. diagnostics.at:527: testing Counterexamples ... +./diagnostics.at:282: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { -160. diagnostics.at:645: testing Deep Counterexamples ... 
-./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./diagnostics.at:504: "$PERL" -pi -e ' + ++$example; + $_ = "" if $example % 2 == 0; + } +' experr || exit 77 +./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=error +./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./input.at:3062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./diagnostics.at:465: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { -161. diagnostics.at:713: testing Indentation with message suppression ... -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-other input.y ++$example; $_ = "" if $example % 2 == 0; } ' experr || exit 77 -49. input.at:2224: ok +./diagnostics.at:432: "$PERL" -pi -e ' + s{()}{ $1 eq "" ? $1 : "" }ge; + if (/Example/) + { ++$example; $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -163. skeletons.at:85: testing Installed skeleton file names ... -162. skeletons.at:25: testing Relative skeleton file names ... 
-./skeletons.at:27: mkdir tmp +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none ./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y - -153. diagnostics.at:351: ./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Wnone,none -Werror --trace=none - ok -./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y -./diagnostics.at:328: "$PERL" -pi -e ' +./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:504: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4788,21 +4770,13 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y - -158. diagnostics.at:504: ok -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Werror -./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none -164. skeletons.at:142: testing Boolean=variables: invalid skeleton defaults ... 
-./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret tmp/input-gram.y -./output.at:272: cat foo.y -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=none -Werror --trace=none -146. diagnostics.at:133: ok -./skeletons.at:155: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -126. output.at:272: - ok -./diagnostics.at:432: "$PERL" -pi -e ' +./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Wnone,none -Werror --trace=none +./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +./diagnostics.at:527: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4810,9 +4784,17 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -151. diagnostics.at:303: ok -165. skeletons.at:166: testing Complaining during macro argument expansion ... 
-./diagnostics.at:645: "$PERL" -pi -e ' +./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret -Wall,cex input.y +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=error +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error +./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error +./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +82. named-refs.at:715: ok +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +147. diagnostics.at:182: ok +./diagnostics.at:217: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4820,6 +4802,9 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 +36. input.at:1642: ok + + ./diagnostics.at:372: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -4827,8 +4812,154 @@ ++$example; $_ = "" if $example % 2 == 0; } +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none ' experr || exit 77 -./diagnostics.at:399: "$PERL" -pi -e ' +160. diagnostics.at:645: testing Deep Counterexamples ... +161. diagnostics.at:713: testing Indentation with message suppression ... 
+./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-other input.y + +stderr: +./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error +./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +162. skeletons.at:25: testing Relative skeleton file names ... +./skeletons.at:27: mkdir tmp +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +input.y:12.10-32: error: unset value: $$ [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~ +input.y:12.10-12: error: unused value: $1 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.18-20: error: unused value: $3 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.26-28: error: unused value: $5 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] + 13 | b: INT | %empty; + | ^~~~~~ +input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~ +input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~ +input.y:14.10-62: error: unset value: $$ [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:14.22-24: error: unused value: $3 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:14.43-45: error: unused value: $5 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.10-49: error: unset value: $$ [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:15.18-20: error: unused value: $3 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.30-32: error: unused value: $5 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:16.10-37: error: unset value: $$ [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:16.18-20: error: unused value: $3 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:16.27-29: error: unused value: $5 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:18.10-58: error: unset value: $$ [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 
+input.y:18.10-12: error: unused value: $1 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ +input.y:18.31-33: error: unused value: $3 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ +input.y:18.52-54: error: unused value: $5 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:19.10-72: error: unset value: $$ [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.10-12: error: unused value: $1 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.31-33: error: unused value: $3 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.66-68: error: unused value: $5 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] + 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; + | ^~~~~~~~~~~~~~~~~~~~ +input.y:22.10-68: error: unset value: $$ [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:22.10-12: error: unused value: $1 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.14-16: error: unused value: $2 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.23-25: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.10-50: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.10-12: error: unused value: $1 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.23-25: error: unused value: $2 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.27-29: error: unused value: $3 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unused value: $4 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.44-46: error: unused value: $5 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:26.23-25: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +input.y:26.40-42: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y +./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export 
VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret -Wall,cex input.y +./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret -Wall,cex input.y +149. diagnostics.at:235: ok +64. input.at:2946: ok +./diagnostics.at:351: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4836,9 +4967,30 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret -Wall,cex input.y + +./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret tmp/input-gram.y +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Werror +163. skeletons.at:85: testing Installed skeleton file names ... +./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y + +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=none -Werror --trace=none +./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./input.at:786: sed 's,.*/$,,' stderr 1>&2 +./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +152. diagnostics.at:328: ok +151. 
diagnostics.at:303: ok +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none +164. skeletons.at:142: testing Boolean=variables: invalid skeleton defaults ... +155. diagnostics.at:399: ok +145. diagnostics.at:84: ok +150. diagnostics.at:282: ok +./skeletons.at:155: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./skeletons.at:64: cat input-gram.tab.c -./diagnostics.at:465: "$PERL" -pi -e ' +146. diagnostics.at:133: ./diagnostics.at:645: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -4846,97 +4998,207 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./skeletons.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y -./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Werror -./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret -Wall,cex input.y -150. diagnostics.at:282: ok -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none -166. skeletons.at:248: testing Fatal errors make M4 exit immediately ... + ok -./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -58. 
input.at:2675: ok -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y -./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret -Wall,cex input.y -./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-gram.y -./skeletons.at:262: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y -./skeletons.at:121: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-cmd-line input-cmd-line.c $LIBS stderr: -input.y:2.15: error: stray '$' [-Werror=other] +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none +stdout: -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -164. skeletons.at:142: ok +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Wnone,none -Werror --trace=none +./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y +165. skeletons.at:166: testing Complaining during macro argument expansion ... +stderr: +166. skeletons.at:248: testing Fatal errors make M4 exit immediately ... +stdout: +./skeletons.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y -149. diagnostics.at:235: ok -167. skeletons.at:302: testing Fatal errors but M4 continues producing output ... -./output.at:267: cat foo.y +./skeletons.at:262: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y +./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +164. 
skeletons.at:142: +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +stderr: +input.y:2.1-12: error: deprecated directive: '%pure-parser', use '%define api.pure' [-Werror=deprecated] + 2 | %pure-parser + | ^~~~~~~~~~~~ + | %define api.pure +input.y:3.1-14: error: deprecated directive: '%error-verbose', use '%define parse.error verbose' [-Werror=deprecated] + 3 | %error-verbose + | ^~~~~~~~~~~~~~ + | %define parse.error verbose -160. diagnostics.at:645: ok + ok +./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").cc" --header=").hh" cxx.y +156. diagnostics.at:432: ok +167. skeletons.at:302: testing Fatal errors but M4 continues producing output ... +25. input.at:1139: ./skeletons.at:121: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-cmd-line input-cmd-line.c $LIBS + ok +./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-gram.y ./skeletons.at:314: "$PERL" gen-skel.pl > skel.c || exit 77 -./input.at:2733: sed 's,.*/$,,' stderr 1>&2 -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +148. diagnostics.at:217: stderr: +input.y:12.10-32: error: unset value: $$ [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~ +input.y:12.10-12: error: unused value: $1 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.18-20: error: unused value: $3 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.26-28: error: unused value: $5 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] + 13 | b: INT | %empty; + | ^~~~~~ +input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~ +input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~ +input.y:14.10-62: error: unset value: $$ [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:14.22-24: error: unused value: $3 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:14.43-45: error: unused value: $5 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.10-49: error: unset value: $$ [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:15.18-20: error: unused value: $3 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.30-32: error: unused value: $5 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:16.10-37: error: unset value: $$ [-Werror=other] + 16 | e: INT | INT { } INT 
{ } INT { $1; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:16.18-20: error: unused value: $3 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:16.27-29: error: unused value: $5 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:18.10-58: error: unset value: $$ [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:18.10-12: error: unused value: $1 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ +input.y:18.31-33: error: unused value: $3 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ +input.y:18.52-54: error: unused value: $5 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:19.10-72: error: unset value: $$ [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.10-12: error: unused value: $1 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.31-33: error: unused value: $3 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.66-68: error: unused value: $5 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] + 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; + | ^~~~~~~~~~~~~~~~~~~~ +input.y:22.10-68: error: unset value: $$ [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:22.10-12: error: unused value: $1 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.14-16: error: unused value: $2 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.23-25: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.10-50: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.10-12: error: unused value: $1 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.23-25: error: unused value: $2 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.27-29: error: unused value: $3 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unused value: $4 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.44-46: error: unused value: $5 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:26.23-25: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +input.y:26.40-42: error: unset value: $$ 
[-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +158. diagnostics.at:504: 54. input.at:2429: ok + ok 168. sets.at:27: testing Nullable ... ./sets.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y -152. diagnostics.at:328: ok -125. output.at:267: ok -170. sets.at:153: testing Firsts ... -./skeletons.at:69: cat input-gram.tab.c 169. sets.at:111: testing Broken Closure ... ./sets.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y + ok +157. diagnostics.at:465: ok -./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y +./skeletons.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +170. sets.at:153: testing Firsts ... +172. sets.at:269: testing Build relations ... +stderr: +stdout: +./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y +./skeletons.at:279: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y 171. sets.at:228: testing Accept ... -154. diagnostics.at:372: ok -./skeletons.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=tmp/skel.c tmp/input-cmd-line.y -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=error ./sets.at:240: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -./skeletons.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./diagnostics.at:527: "$PERL" -pi -e ' - s{()}{ $1 eq "" ? 
$1 : "" }ge; - if (/Example/) - { - ++$example; - $_ = "" if $example % 2 == 0; - } -' experr || exit 77 - -stderr: -input.y:2.1-12: error: deprecated directive: '%pure-parser', use '%define api.pure' [-Werror=deprecated] - 2 | %pure-parser - | ^~~~~~~~~~~~ - | %define api.pure -input.y:3.1-14: error: deprecated directive: '%error-verbose', use '%define parse.error verbose' [-Werror=deprecated] - 3 | %error-verbose - | ^~~~~~~~~~~~~~ - | %define parse.error verbose +./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.cc" --header="@{.hh" cxx.y +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Wnone,none -Werror --trace=none +./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" -172. sets.at:269: testing Build relations ... -./output.at:277: cat foo.y +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none 173. sets.at:315: testing Reduced Grammar ... -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y - -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y 174. sets.at:394: testing Reduced Grammar with prec and assoc ... -48. input.at:2191: ok -./diagnostics.at:725: sed 's,.*/$,,' stderr 1>&2 -./skeletons.at:74: cat input-cmd-line.tab.c +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y ./sets.at:412: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -156. diagnostics.at:432: 127. output.at:277: ok - ok -./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y -./skeletons.at:279: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -155. diagnostics.at:399: ok -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y stderr: -167. skeletons.at:302: ok -./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -178. reduce.at:224: testing Useless Parts ... 
-./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y bison (GNU Bison) 3.8.2 RITEM 0: e $end (rule 0) @@ -5008,20 +5270,13 @@ State 3: rule 0: -175. reduce.at:26: testing Useless Terminals ... -./reduce.at:47: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -176. reduce.at:70: testing Useless Nonterminals ... -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=error -162. skeletons.at:25: ok -stderr: ./sets.at:43: sed -f extract.sed stderr -157. diagnostics.at:465: ok -177. reduce.at:120: testing Useless Rules ... -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -input.y:2.16-18: error: useless %printer for type <*> [-Werror=other] +./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y stderr: +175. reduce.at:26: testing Useless Terminals ... +./reduce.at:47: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none bison (GNU Bison) 3.8.2 RITEM 0: a $end (rule 0) @@ -5262,27 +5517,21 @@ State 10: rule 0: -./sets.at:243: sed -n 's/.*define YYFINAL *\([0-9][0-9]*\)/final state \1/p' input.c +58. input.at:2675: ok +159. diagnostics.at:527: ok ./sets.at:127: sed -n 's/[ ]*$//;/^RTC: Firsts Output BEGIN/,/^RTC: Firsts Output END/p' stderr +./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -stdout: -168. sets.at:27: ok +176. reduce.at:70: testing Useless Nonterminals ... +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +153. diagnostics.at:351: ok +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:1124: sed 's,.*/$,,' stderr 1>&2 -final state 6 -./sets.at:248: sed -n ' - /^State \(.*\)/{ - s//final state \1/ - x - } - / accept/{ - x - p - q - } - ' input.output +./sets.at:243: sed -n 's/.*define YYFINAL *\([0-9][0-9]*\)/final state \1/p' input.c stderr: +177. reduce.at:120: testing Useless Rules ... 
+./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y bison (GNU Bison) 3.8.2 RITEM 0: exp $end (rule 0) @@ -5403,58 +5652,81 @@ State 15: rule 6: $end '<' '>' '+' '-' '^' '=' -./skeletons.at:223: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input3.y - -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -169. sets.at:111: ok - ./sets.at:172: sed -f extract.sed stderr -171. sets.at:228: ok - -180. reduce.at:406: testing Underivable Rules ... -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +178. reduce.at:224: testing Useless Parts ... +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y 179. reduce.at:312: testing Reduced Automaton ... ./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret not-reduced.y -166. skeletons.at:248: ok -182. reduce.at:550: testing no lr.type: Single State Split ... -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none -./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Werror +174. sets.at:394: ok +180. reduce.at:406: testing Underivable Rules ... 
+./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./output.at:336: ls ").cc" ").hh" +./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output + +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y +./diagnostics.at:725: sed 's,.*/$,,' stderr 1>&2 +stdout: +./input.at:776: sed 's,.*/$,,' stderr 1>&2 +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Werror 181. reduce.at:452: testing Bad start symbols ... -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y -170. sets.at:153: ok -183. reduce.at:550: testing lr.type=lalr: Single State Split ... -./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +154. diagnostics.at:372: ok +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Werror +168. sets.at:27: ok +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./skeletons.at:69: cat input-gram.tab.c +stdout: +./output.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" +stdout: +).cc +).hh +166. 
skeletons.at:248: ok +./skeletons.at:223: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input3.y +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=none -Werror --trace=none +final state 6 +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-none input.y +stderr: +167. skeletons.at:302: stdout: +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none + ok +./sets.at:248: sed -n ' + /^State \(.*\)/{ + s//final state \1/ + x + } + / accept/{ + x + p + q + } + ' input.output +./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y +170. sets.at:153: ok -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -185. reduce.at:550: testing lr.type=canonical-lr: Single State Split ... -186. reduce.at:783: testing no lr.type: Lane Split ... +160. diagnostics.at:645: + ok +175. reduce.at:26: ok +169. sets.at:111: ok +182. reduce.at:550: testing no lr.type: Single State Split ... +183. reduce.at:550: testing lr.type=lalr: Single State Split ... +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror + -184. reduce.at:550: testing lr.type=ielr: Single State Split ... ./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Wnone,none -Werror --trace=none -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none +184. 
reduce.at:550: testing lr.type=ielr: Single State Split ... ./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Werror -188. reduce.at:783: testing lr.type=ielr: Lane Split ... -./skeletons.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input4.y -174. sets.at:394: 187. reduce.at:783: testing lr.type=lalr: Lane Split ... - ok -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y stderr: -189. reduce.at:783: testing lr.type=canonical-lr: Lane Split ... -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -159. diagnostics.at:527: ok bison (GNU Bison) 3.8.2 input.y: error: 1 nonterminal useless in grammar [-Werror=other] input.y: error: 1 rule useless in grammar [-Werror=other] @@ -5512,36 +5784,11 @@ reduced input.y defines 7 terminals, 4 nonterminals, and 6 productions. -40. input.at:1826: ok -./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -190. reduce.at:1027: testing no lr.type: Complex Lane Split ... 
-./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror - -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none -63. input.at:2883: ok -./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output -./sets.at:325: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:783: sed -n '/^State 0$/,$p' input.output -175. reduce.at:26: ok -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error -./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:550: sed -n '/^State 0$/,$p' input.output -165. skeletons.at:166: ok -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Werror -191. reduce.at:1027: testing lr.type=lalr: Complex Lane Split ... stderr: -./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: -input.y: error: 3 nonterminals useless in grammar [-Werror=other] -input.y: error: 3 rules useless in grammar [-Werror=other] -input.y:11.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other] -input.y:12.1-8: error: nonterminal useless in grammar: useless2 [-Werror=other] -input.y:13.1-8: error: nonterminal useless in grammar: useless3 [-Werror=other] -192. reduce.at:1027: testing lr.type=ielr: Complex Lane Split ... - +186. reduce.at:783: testing no lr.type: Lane Split ... 
+./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y input.y: error: 5 reduce/reduce conflicts [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:2.14-17: error: rule useless in parser due to conflicts [-Werror=other] @@ -5559,61 +5806,65 @@ input.y:2.42-45: error: rule useless in parser due to conflicts [-Werror=other] 2 | expr: term | term | term | term | term | term | ^~~~ -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y + +./output.at:339: ls "@{.cc" "@{.hh" +185. reduce.at:550: testing lr.type=canonical-lr: Single State Split ... +stdout: +./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.cc" --header="[.hh" cxx.y +187. reduce.at:783: testing lr.type=lalr: Lane Split ... +189. reduce.at:783: testing lr.type=canonical-lr: Lane Split ... +./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +188. reduce.at:783: testing lr.type=ielr: Lane Split ... 
+./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:550: sed -n '/^State 0$/,$p' input.output +./reduce.at:550: sed -n '/^State 0$/,$p' input.output ./reduce.at:550: sed -n '/^State 0$/,$p' input.output ./reduce.at:783: sed -n '/^State 0$/,$p' input.output ./reduce.at:550: sed -n '/^State 0$/,$p' input.output -./reduce.at:89: sed 's,.*/$,,' stderr 1>&2 -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=none -Werror --trace=none -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:783: sed -n '/^State 0$/,$p' input.output stderr: - - -input.y: error: 2 nonterminals useless in grammar [-Werror=other] -input.y: error: 3 rules useless in grammar [-Werror=other] -input.y:6.1-11: error: nonterminal useless in grammar: underivable [-Werror=other] - 6 | underivable: indirection; - | ^~~~~~~~~~~ -input.y:7.1-11: error: nonterminal useless in grammar: indirection [-Werror=other] - 7 | indirection: underivable; - | ^~~~~~~~~~~ -input.y:5.15-25: error: rule useless in grammar [-Werror=other] - 5 | exp: useful | underivable; - | ^~~~~~~~~~~ -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -./sets.at:286: sed 's,.*/$,,' stderr 1>&2 +stdout: +./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y ./reduce.at:783: sed -n '/^State 0$/,$p' input.output -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -193. reduce.at:1027: testing lr.type=canonical-lr: Complex Lane Split ... 
./reduce.at:783: sed -n '/^State 0$/,$p' input.output -./reduce.at:550: sed -n '/^State 0$/,$p' input.output -./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=error +./output.at:336: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c ").cc" +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error +./skeletons.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=tmp/skel.c tmp/input-cmd-line.y +49. input.at:2224: ok +./skeletons.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input4.y stderr: -./reduce.at:1027: sed -n '/^State 0$/,$p' input.output -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -input.y: error: 1 nonterminal useless in grammar [-Werror=other] -input.y: error: 1 rule useless in grammar [-Werror=other] -input.y:18.1-6: error: nonterminal useless in grammar: unused [-Werror=other] - 18 | unused - | ^~~~~~ -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:420: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -195. reduce.at:1296: testing lr.type=lalr: Split During Added Lookahead Propagation ... -./reduce.at:1027: sed -n '/^State 0$/,$p' input.output stderr: -194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ... -196. reduce.at:1296: testing lr.type=ielr: Split During Added Lookahead Propagation ... 
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,none,other input.y -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +input.y: error: 3 nonterminals useless in grammar [-Werror=other] +input.y: error: 3 rules useless in grammar [-Werror=other] +input.y:11.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other] +input.y:12.1-8: error: nonterminal useless in grammar: useless2 [-Werror=other] +input.y:13.1-8: error: nonterminal useless in grammar: useless3 [-Werror=other] +not-reduced.y: error: 2 nonterminals useless in grammar [-Werror=other] +not-reduced.y: error: 3 rules useless in grammar [-Werror=other] +not-reduced.y:14.1-13: error: nonterminal useless in grammar: not_reachable [-Werror=other] + 14 | not_reachable: useful { /* A not reachable action. */ } + | ^~~~~~~~~~~~~ +not-reduced.y:17.1-14: error: nonterminal useless in grammar: non_productive [-Werror=other] + 17 | non_productive: non_productive useless_token + | ^~~~~~~~~~~~~~ +not-reduced.y:11.6-57: error: rule useless in grammar [-Werror=other] + 11 | | non_productive { /* A non productive action. */ } + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +./output.at:335: ls "(.cc" "(.hh" +stderr: +input.y:2.8-10: error: duplicate directive [-Werror=other] + 2 | %start exp + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %start exp foo + | ^~~ +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +stderr: input.y: error: 9 nonterminals useless in grammar [-Werror=other] input.y: error: 9 rules useless in grammar [-Werror=other] input.y:10.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other] @@ -5643,667 +5894,386 @@ input.y:18.1-8: error: nonterminal useless in grammar: useless9 [-Werror=other] 18 | useless9: '9'; | ^~~~~~~~ -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:261: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./sets.at:286: sed 's,.*/$,,' stderr 1>&2 +./sets.at:325: sed 's,.*/$,,' stderr 1>&2 +stdout: + +@{.cc +@{.hh +171. sets.at:228: ok +190. reduce.at:1027: testing no lr.type: Complex Lane Split ... +./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y + stderr: -not-reduced.y: error: 2 nonterminals useless in grammar [-Werror=other] -not-reduced.y: error: 3 rules useless in grammar [-Werror=other] -not-reduced.y:14.1-13: error: nonterminal useless in grammar: not_reachable [-Werror=other] - 14 | not_reachable: useful { /* A not reachable action. 
*/ } - | ^~~~~~~~~~~~~ -not-reduced.y:17.1-14: error: nonterminal useless in grammar: non_productive [-Werror=other] - 17 | non_productive: non_productive useless_token - | ^~~~~~~~~~~~~~ -not-reduced.y:11.6-57: error: rule useless in grammar [-Werror=other] - 11 | | non_productive { /* A non productive action. */ } - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -181. reduce.at:452: ok -./reduce.at:146: sed 's,.*/$,,' stderr 1>&2 +stdout: +./output.at:341: ls "[.cc" "[.hh" +./output.at:337: ls "#.cc" "#.hh" +191. reduce.at:1027: testing lr.type=lalr: Complex Lane Split ... +./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.cc" --header="@@.hh" cxx.y +stderr: +stderr: +input.y: error: 2 nonterminals useless in grammar [-Werror=other] +input.y: error: 3 rules useless in grammar [-Werror=other] +input.y:6.1-11: error: nonterminal useless in grammar: underivable [-Werror=other] + 6 | underivable: indirection; + | ^~~~~~~~~~~ +input.y:7.1-11: error: nonterminal useless in grammar: indirection [-Werror=other] + 7 | indirection: underivable; + | ^~~~~~~~~~~ +input.y:5.15-25: error: rule useless in grammar [-Werror=other] + 5 | exp: useful | underivable; + | ^~~~~~~~~~~ +stderr: +stdout: +input.y: error: 1 nonterminal useless in grammar [-Werror=other] +input.y: error: 1 rule useless in grammar [-Werror=other] +input.y:18.1-6: error: nonterminal useless in grammar: unused [-Werror=other] + 18 | unused + | ^~~~~~ +./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y +./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +stderr: +stdout: +./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +./input.at:1877: sed 's,.*/$,,' stderr 1>&2 +./reduce.at:89: sed 's,.*/$,,' stderr 1>&2 ./reduce.at:341: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=error -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none -161. 
diagnostics.at:713: ok -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error +./reduce.at:146: sed 's,.*/$,,' stderr 1>&2 +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y +stdout: +(.cc +(.hh +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Werror +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none +./skeletons.at:74: cat input-cmd-line.tab.c +165. skeletons.at:166: ok + +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Wnone,none -Werror --trace=none +192. reduce.at:1027: testing lr.type=ielr: Complex Lane Split ... +./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,no-all,other input.y +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none +./reduce.at:261: sed 's,.*/$,,' stderr 1>&2 +./output.at:339: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@{.cc" +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 
+./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:420: sed 's,.*/$,,' stderr 1>&2 +stdout: +stdout: +#.cc +#.hh +[.cc +[.hh +48. input.at:2191: ok +./output.at:342: ls "].cc" "].hh" +./output.at:340: ls "@}.cc" "@}.hh" +./output.at:338: ls "@@.cc" "@@.hh" +193. reduce.at:1027: testing lr.type=canonical-lr: Complex Lane Split ... +./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./output.at:267: cat foo.y +./output.at:335: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "(.cc" +162. skeletons.at:25: ok -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +stderr: +input.y:2.15: error: stray '$' [-Werror=other] +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=none -Werror --trace=none +194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ... +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stderr: ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +stdout: +./skeletons.at:122: $PREPARSER ./input-cmd-line +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./output.at:341: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "[.cc" +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./output.at:337: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "#.cc" +stdout: +stdout: +].cc +].hh +@@.cc +@@.hh +stdout: +@}.cc +@}.hh +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./output.at:272: cat foo.y +./input.at:2733: sed 's,.*/$,,' stderr 1>&2 +./reduce.at:341: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error ./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +syntax error, unexpected 'a', expecting end of file +./skeletons.at:122: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +125. output.at:267: ok + +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none +195. reduce.at:1296: testing lr.type=lalr: Split During Added Lookahead Propagation ... +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -input.y:12.10-32: error: unset value: $$ [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~ -input.y:12.10-12: error: unused value: $1 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.18-20: error: unused value: $3 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.26-28: error: unused value: $5 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] - 13 | b: INT | %empty; - | ^~~~~~ -input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~ -input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~ -input.y:14.10-62: error: unset value: $$ [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:14.22-24: error: unused value: $3 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:14.43-45: error: unused value: $5 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.10-49: error: unset value: $$ [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:15.18-20: error: unused value: $3 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.30-32: error: unused value: $5 
[-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:16.10-37: error: unset value: $$ [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:16.18-20: error: unused value: $3 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:16.27-29: error: unused value: $5 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:18.10-58: error: unset value: $$ [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:18.10-12: error: unused value: $1 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ -input.y:18.31-33: error: unused value: $3 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ -input.y:18.52-54: error: unused value: $5 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:19.10-72: error: unset value: $$ [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.10-12: error: unused value: $1 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.31-33: error: unused value: $3 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.66-68: error: unused value: $5 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] - 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; - | ^~~~~~~~~~~~~~~~~~~~ -input.y:22.10-68: error: unset value: $$ [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:22.10-12: error: unused value: $1 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.14-16: error: unused value: $2 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.23-25: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.10-50: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.10-12: error: unused value: $1 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.23-25: error: unused value: $2 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.27-29: error: unused value: $3 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unused value: $4 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.44-46: error: unused value: $5 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:26.23-25: error: 
unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -input.y:26.40-42: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ +stderr: +stdout: +./output.at:782: sed -ne 's/#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/include/ast/loc.hh +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=error +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./output.at:342: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "].cc" +./output.at:340: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@}.cc" +./output.at:338: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@@.cc" +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +126. output.at:272: ok + +196. reduce.at:1296: testing lr.type=ielr: Split During Added Lookahead Propagation ... +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +./output.at:794: sed -ne 's/^#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/x1.hh +161. diagnostics.at:713: ok +./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input-gram.c input-gram.y +16. input.at:784: ok +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror + 197. reduce.at:1296: testing lr.type=canonical-lr: Split During Added Lookahead Propagation ... ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -24. 
input.at:1074: ok -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./input.at:776: sed 's,.*/$,,' stderr 1>&2 -198. reduce.at:1627: testing no lr.default-reduction ... -./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none stderr: -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error -input.y: warning: 4 shift/reduce conflicts [-Wconflicts-sr] +input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./output.at:641: grep -v // input.gv -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:2746: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror -Wno-error=other input.y -143. 
output.at:641: ok ./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -199. reduce.at:1627: testing lr.default-reduction=most ... -stderr: +198. reduce.at:1627: testing no lr.default-reduction ... ./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=error +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output stderr: stdout: -./skeletons.at:122: $PREPARSER ./input-cmd-line - -input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stderr: -syntax error, unexpected 'a', expecting end of file -./skeletons.at:122: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output -200. reduce.at:1627: testing lr.default-reduction=consistent ... -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=none -Werror --trace=none -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -173. 
sets.at:315: ok -./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input-gram.c input-gram.y -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:109: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./reduce.at:783: $PREPARSER ./input stderr: input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=none -Werror --trace=none -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output -./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-error=other -Werror input.y - +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none stderr: stdout: -172. sets.at:269: ./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./reduce.at:783: $PREPARSER ./input -./reduce.at:270: sed -n '/^State 0/q;/^$/!p' input.output - ok ./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 +./output.at:806: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x2.cc -M out/=bar/ x2.yy +15. input.at:774: ok + stderr: -stdout: stderr: -syntax error -./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -201. 
reduce.at:1627: testing lr.default-reduction=accepting ... -./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:298: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./reduce.at:1027: $PREPARSER ./input -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +stdout: +stdout: +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none +./reduce.at:550: $PREPARSER ./input +./reduce.at:550: $PREPARSER ./input stderr: stderr: -./skeletons.at:127: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-gram input-gram.c $LIBS stderr: -input.y:12.10-32: error: unset value: $$ [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~ -input.y:12.10-12: error: unused value: $1 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.18-20: error: unused value: $3 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.26-28: error: unused value: $5 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] - 13 | b: INT | %empty; - | ^~~~~~ -input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~ -input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~ -input.y:14.10-62: error: unset value: $$ [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:14.22-24: error: unused value: $3 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:14.43-45: error: unused value: $5 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.10-49: error: unset value: $$ [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:15.18-20: error: unused value: $3 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.30-32: error: unused value: $5 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:16.10-37: error: unset value: $$ [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:16.18-20: error: unused value: $3 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:16.27-29: error: unused value: $5 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:18.10-58: error: unset value: $$ [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:18.10-12: error: unused value: $1 [-Werror=other] - 18 | g: INT | 
INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ -input.y:18.31-33: error: unused value: $3 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ -input.y:18.52-54: error: unused value: $5 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:19.10-72: error: unset value: $$ [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.10-12: error: unused value: $1 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.31-33: error: unused value: $3 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.66-68: error: unused value: $5 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] - 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; - | ^~~~~~~~~~~~~~~~~~~~ -input.y:22.10-68: error: unset value: $$ [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:22.10-12: error: unused value: $1 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.14-16: error: unused value: $2 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.23-25: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.10-50: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.10-12: error: unused value: $1 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.23-25: error: unused value: $2 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.27-29: error: unused value: $3 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unused value: $4 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.44-46: error: unused value: $5 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:26.23-25: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -input.y:26.40-42: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -syntax error -186. 
reduce.at:783: stdout: - ok -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:355: sed -n '/^Grammar/q;/^$/!p' not-reduced.output -./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y -./reduce.at:434: sed -n '/^Grammar/q;/^$/!p' input.output stderr: -./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output - -stdout: -./reduce.at:392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret reduced.y -./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror=other -Wno-other input.y -./reduce.at:213: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c - -./input.at:786: sed 's,.*/$,,' stderr 1>&2 -180. reduce.at:406: ./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y - ok -190. reduce.at:1027: ok -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -202. report.at:37: testing Reports ... -./output.at:335: ls "(.cc" "(.hh" stdout: -202. report.at:37: (.cc -(.hh -stderr: -stderr: - - -./output.at:335: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "(.cc" +input.y:2.16-18: error: useless %printer for type <*> [-Werror=other] stdout: stdout: - skipped (report.at:75) -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +199. reduce.at:1627: testing lr.default-reduction=most ... 
+./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:783: $PREPARSER ./input ./reduce.at:550: $PREPARSER ./input +./reduce.at:550: $PREPARSER ./input +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +stderr: +./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./output.at:277: cat foo.y +./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 +stdout: stderr: stderr: +./reduce.at:783: $PREPARSER ./input +./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -203. report.at:3123: testing Reports with conflicts ... +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +stderr: stdout: +129. output.at:328: ok -./reduce.at:550: $PREPARSER ./input -stderr: +200. reduce.at:1627: testing lr.default-reduction=consistent ... +./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +./input.at:1124: sed 's,.*/$,,' stderr 1>&2 +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./skeletons.at:127: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-gram input-gram.c $LIBS +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stderr: -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./output.at:337: ls "#.cc" "#.hh" -stdout: -204. conflicts.at:28: testing Token declaration order ... -stdout: -205. conflicts.at:101: testing Token declaration order: literals vs. identifiers ... -./reduce.at:783: $PREPARSER ./input -184. reduce.at:550: ok -./conflicts.at:130: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c -185. reduce.at:550: ok -203. 
report.at:3123: stderr: stderr: +syntax error +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -187. reduce.at:783: ok -stderr: -stdout: -stdout: -./reduce.at:550: $PREPARSER ./input -stdout: -stderr: - skipped (report.at:3132) syntax error ./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:1027: $PREPARSER ./input -stderr: -59. input.at:2719: ok -stdout: -179. reduce.at:312: ok -./reduce.at:550: $PREPARSER ./input -./reduce.at:783: $PREPARSER ./input -stdout: -#.cc -#.hh -./output.at:337: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "#.cc" +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +172. sets.at:269: ok +173. sets.at:315: ok + stderr: -./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.cc" --header="[.hh" cxx.y stderr: +201. reduce.at:1627: testing lr.default-reduction=accepting ... stdout: -189. reduce.at:783: ok -206. conflicts.at:183: testing Useless associativity warning ... -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence input.y -./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y -stderr: +./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stdout: +./reduce.at:1027: $PREPARSER ./input +./reduce.at:1027: $PREPARSER ./input +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none + +202. report.at:37: testing Reports ... +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error stderr: syntax error -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -183. reduce.at:550: stderr: - +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y +188. reduce.at:783: ok +184. reduce.at:550: ok +185. reduce.at:550: 189. reduce.at:783: 127. output.at:277: 187. reduce.at:783: ok + ok + ok +182. reduce.at:550: ok ok +40. input.at:1826: ok +183. reduce.at:550: ok -stdout: -192. reduce.at:1027: ok -188. 
reduce.at:783: ok -stderr: -./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").cc" --header=").hh" cxx.y -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -182. reduce.at:550: ok -208. conflicts.at:275: testing S/R in initial ... -207. conflicts.at:218: testing Useless precedence warning ... -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -stdout: -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stderr: +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=none -Werror --trace=none + +204. conflicts.at:28: testing Token declaration order ... +stderr: +206. conflicts.at:183: testing Useless associativity warning ... +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence input.y stdout: +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./conflicts.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./reduce.at:1027: $PREPARSER ./input +203. report.at:3123: testing Reports with conflicts ... +207. conflicts.at:218: testing Useless precedence warning ... +205. conflicts.at:101: testing Token declaration order: literals vs. identifiers ... +./conflicts.at:130: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y +208. conflicts.at:275: testing S/R in initial ... +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +209. conflicts.at:301: testing %nonassoc and eof ... +./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 210. 
conflicts.at:509: testing parse.error=verbose and consistent errors: lr.type=ielr ... -./reduce.at:1296: $PREPARSER ./input ./conflicts.at:509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 211. conflicts.at:513: testing parse.error=verbose and consistent errors: lr.type=ielr %glr-parser ... +./conflicts.at:513: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +131. output.at:336: ok +stderr: stdout: ./reduce.at:1027: $PREPARSER ./input -212. conflicts.at:518: testing parse.error=verbose and consistent errors: lr.type=ielr c++ ... +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:513: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -209. conflicts.at:301: testing %nonassoc and eof ... stderr: +202. report.at:37: stderr: syntax error -./output.at:340: ls "@}.cc" "@}.hh" -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.cc" --header="@{.hh" cxx.y -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:82: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./output.at:341: ls "[.cc" "[.hh" -213. conflicts.at:523: testing parse.error=verbose and consistent errors: lr.type=ielr java ... -./conflicts.at:523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -196. reduce.at:1296: ok -stdout: -@}.cc -@}.hh +syntax error +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (report.at:75) stderr: -[.cc -[.hh -./output.at:340: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@}.cc" -215. conflicts.at:535: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=accepting ... -stdout: -214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ... 
./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Werror -./conflicts.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./output.at:341: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "[.cc" +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Werror +./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +212. conflicts.at:518: testing parse.error=verbose and consistent errors: lr.type=ielr c++ ... +./conflicts.at:518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +213. conflicts.at:523: testing parse.error=verbose and consistent errors: lr.type=ielr java ... +./conflicts.at:523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./output.at:806: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x2.o out/x2.cc stderr: +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p' -191. reduce.at:1027: ok -216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +186. reduce.at:783: ok + +214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ... ./conflicts.at:530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -176. reduce.at:70: ok -stdout: -178. 
reduce.at:224: ./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./conflicts.at:82: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:518: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./conflicts.at:509: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +203. report.at:3123: ./conflicts.at:513: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +190. reduce.at:1027: ok + skipped (report.at:3132) +191. reduce.at:1027: ok +205. conflicts.at:101: ok +./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,none,other input.y +181. reduce.at:452: ok - ok -205. conflicts.at:101: stderr: -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output - ok -217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ... -./output.at:336: ls ").cc" ").hh" -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -stdout: -./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stdout: -./conflicts.at:509: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -).cc -).hh -stdout: -./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.cc" --header="@@.hh" cxx.y -./reduce.at:1296: $PREPARSER ./input -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -./output.at:336: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c ").cc" -stderr: -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: + +216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... +./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +215. conflicts.at:535: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=accepting ... 
+./conflicts.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -stdout: -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Werror -./conflicts.at:518: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] +217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ... +./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 218. conflicts.at:551: testing parse.error=verbose and consistent errors: lr.type=ielr parse.lac=full ... ./conflicts.at:551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y -./output.at:339: ls "@{.cc" "@{.hh" -./conflicts.at:513: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -197. reduce.at:1296: ok -177. reduce.at:120: 219. conflicts.at:558: testing parse.error=verbose and consistent errors: c++ lr.type=canonical-lr parse.lac=full ... -213. conflicts.at:523: ./conflicts.at:558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stdout: stderr: - - ok -./conflicts.at:540: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -221. conflicts.at:622: testing parse.error=verbose and consistent errors: ... -./conflicts.at:622: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -@{.cc -@{.hh input.y:2.1-9: error: useless precedence and associativity for "=" [-Werror=precedence] input.y:4.1-5: error: useless associativity for "*", use %precedence [-Werror=precedence] input.y:5.1-11: error: useless precedence for "(" [-Werror=precedence] -./conflicts.at:530: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - skipped (conflicts.at:523) -./output.at:339: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@{.cc" -stderr: -stdout: - -./reduce.at:1027: $PREPARSER ./input -stderr: -stderr: -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stdout: -stderr: -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output -./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2 -222. conflicts.at:626: testing parse.error=verbose and consistent errors: %glr-parser ... -220. conflicts.at:564: testing parse.error=verbose and consistent errors: c++ lr.type=ielr parse.lac=full ... 
-./conflicts.at:564: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none -193. reduce.at:1027: ok -./reduce.at:1627: $PREPARSER ./input - -./conflicts.at:551: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=error -223. conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ... -stderr: -./conflicts.at:626: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./output.at:342: ls "].cc" "].hh" -./conflicts.at:284: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stdout: -199. reduce.at:1627: stdout: - ok -./reduce.at:1627: $PREPARSER ./input -].cc -].hh -./output.at:338: ls "@@.cc" "@@.hh" -./output.at:342: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "].cc" -./conflicts.at:546: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -225. conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... - -stderr: -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -@@.cc -@@.hh +219. conflicts.at:558: testing parse.error=verbose and consistent errors: c++ lr.type=canonical-lr parse.lac=full ... +./conflicts.at:558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: input.y:7.1-9: error: useless precedence and associativity for U [-Werror=precedence] 7 | %nonassoc U @@ -6317,426 +6287,382 @@ input.y:2.1-11: error: useless precedence for Z [-Werror=precedence] 2 | %precedence Z | ^~~~~~~~~~~ -224. conflicts.at:638: testing parse.error=verbose and consistent errors: lr.default-reduction=accepting ... -./conflicts.at:638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./output.at:338: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@@.cc" +./conflicts.at:530: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +213. conflicts.at:523: skipped (conflicts.at:523) +193. reduce.at:1027: ok +192. reduce.at:1027: ok -198. 
reduce.at:1627: ./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none - ok -226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ... -./conflicts.at:622: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:558: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:248: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:632: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -227. conflicts.at:651: testing parse.error=verbose and consistent errors: parse.lac=full lr.default-reduction=accepting ... -./conflicts.at:651: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + +221. conflicts.at:622: testing parse.error=verbose and consistent errors: ... +./conflicts.at:622: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +220. conflicts.at:564: testing parse.error=verbose and consistent errors: c++ lr.type=ielr parse.lac=full ... +./conflicts.at:564: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +222. conflicts.at:626: testing parse.error=verbose and consistent errors: %glr-parser ... +./conflicts.at:626: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:248: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:284: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:551: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:540: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:546: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:558: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,no-all,other input.y stderr: stdout: -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=error -./conflicts.at:84: $PREPARSER 
./input -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./reduce.at:1627: $PREPARSER ./input +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none stderr: -./conflicts.at:84: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +./reduce.at:1296: $PREPARSER ./input +stdout: +134. output.at:339: ok + +223. conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ... +./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +./conflicts.at:622: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:626: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -204. conflicts.at:28: ok +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y ./conflicts.at:564: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -228. conflicts.at:676: testing LAC: %nonassoc requires splitting canonical LR states ... -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y +130. output.at:335: ok +./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output +./reduce.at:270: sed -n '/^State 0/q;/^$/!p' input.output +./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output +./reduce.at:355: sed -n '/^Grammar/q;/^$/!p' not-reduced.output + +224. conflicts.at:638: testing parse.error=verbose and consistent errors: lr.default-reduction=accepting ... +./conflicts.at:638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -./conflicts.at:638: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:642: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +./reduce.at:1296: $PREPARSER ./input +stderr: +stdout: +136. output.at:341: ok +stderr: stdout: -./skeletons.at:128: $PREPARSER ./input-gram +132. output.at:337: ok +225. conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... +./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + +226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ... 
+./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: -./reduce.at:1627: $PREPARSER ./input +./skeletons.at:128: $PREPARSER ./input-gram +./conflicts.at:632: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: stderr: +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./input.at:2746: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror -Wno-error=other input.y +stderr: +stdout: +137. output.at:342: ok + +227. conflicts.at:651: testing parse.error=verbose and consistent errors: parse.lac=full lr.default-reduction=accepting ... +./conflicts.at:651: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:642: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:638: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +./reduce.at:434: sed -n '/^Grammar/q;/^$/!p' input.output ./conflicts.at:647: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS syntax error, unexpected 'a', expecting end of file -./conflicts.at:651: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./skeletons.at:128: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -201. reduce.at:1627: ok -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -229. conflicts.at:764: testing Unresolved SR Conflicts ... stderr: -163. skeletons.at:85: ok -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret reduced.y +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +stderr: stdout: +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none ./reduce.at:1627: $PREPARSER ./input -15. 
input.at:774: ok -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Werror stderr: -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=none -Werror --trace=none -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -200. reduce.at:1627: stderr: - ok +stdout: +133. output.at:338: ok +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=error +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=error +stderr: stdout: -./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y +198. reduce.at:1627: ok +135. output.at:340: ok +197. reduce.at:1296: ok +228. conflicts.at:676: testing LAC: %nonassoc requires splitting canonical LR states ... +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y +24. input.at:1074: ok + + + 230. conflicts.at:887: testing Resolved SR Conflicts ... +229. conflicts.at:764: testing Unresolved SR Conflicts ... ./conflicts.at:898: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y - +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y +stderr: 231. conflicts.at:989: testing %precedence suffices ... 
./conflicts.at:1006: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stdout: +./conflicts.at:84: $PREPARSER ./input +./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-error=other -Werror input.y +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Werror +./conflicts.at:901: cat input.output +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror +231. conflicts.at:989: ok -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y stderr: -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -233. conflicts.at:1096: testing Syntax error in consistent error state: yacc.c ... -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none 232. conflicts.at:1015: testing %precedence does not suffice ... ./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stdout: +./reduce.at:1627: $PREPARSER ./input + +233. conflicts.at:1096: testing Syntax error in consistent error state: yacc.c ... ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error -./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" -./conflicts.at:901: cat input.output +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +stderr: stderr: stdout: stdout: -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh -./output.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" -230. conflicts.at:887: ok -./reduce.at:1296: $PREPARSER ./input -231. 
conflicts.at:989: ok +./conflicts.at:368: $PREPARSER ./input '0<0' +./reduce.at:1627: $PREPARSER ./input +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./skeletons.at:128: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:298: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./conflicts.at:651: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y +./reduce.at:213: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./reduce.at:109: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stderr: -syntax error -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -206. conflicts.at:183: ok -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror - -194. reduce.at:1296: stderr: +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +196. reduce.at:1296: 180. reduce.at:406: ok ok -input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -207. conflicts.at:218: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none + +stderr: +stdout: +./conflicts.at:509: $PREPARSER ./input 234. conflicts.at:1096: testing Syntax error in consistent error state: glr.c ... ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -./conflicts.at:774: sed 's,.*/$,,' stderr 1>&2 235. conflicts.at:1096: testing Syntax error in consistent error state: lalr1.cc ... 
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +stderr: +stderr: +stderr: +stderr: +./conflicts.at:84: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +230. conflicts.at:887: ok 236. conflicts.at:1096: testing Syntax error in consistent error state: glr.cc ... ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error -237. conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] - 4 | e: 'e' | %empty; - | ^~~~~~ stderr: stderr: +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -238. conflicts.at:1127: testing Defaulted Conflicted Reduction ... 
-./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y -stderr: -stdout: -stdout: -./reduce.at:1296: $PREPARSER ./input -./conflicts.at:368: $PREPARSER ./input '0<0' -stderr: -stderr: -syntax error -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:12.3-18: error: rule useless in parser due to conflicts [-Werror=other] stderr: -./conflicts.at:288: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:368: $PREPARSER ./input '0<0<0' -195. reduce.at:1296: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -./conflicts.at:1033: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Wnone,none -Werror --trace=none stderr: +stdout: +./conflicts.at:530: $PREPARSER ./input ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror -syntax error, unexpected '<' -./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./conflicts.at:368: $PREPARSER ./input '0>0' -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; 
export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none +./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c stderr: -./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror -./conflicts.at:368: $PREPARSER ./input '0>0>0' -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror +syntax error, unexpected end of file +./conflicts.at:509: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +199. reduce.at:1627: ok +163. skeletons.at:85: ok + stderr: stderr: stdout: -syntax error, unexpected '>' -./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ./conflicts.at:535: $PREPARSER ./input stderr: -syntax error, unexpected end of file, expecting 'a' or 'b' -./conflicts.at:535: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:368: $PREPARSER ./input '0<0>0' -./conflicts.at:731: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -239. conflicts.at:1264: testing %expect not enough ... -./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -stderr: +./conflicts.at:551: $PREPARSER ./input +stdout: stderr: -16. input.at:784: input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] +237. conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +238. conflicts.at:1127: testing Defaulted Conflicted Reduction ... +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y +./conflicts.at:546: $PREPARSER ./input +input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] - ok stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -215. 
conflicts.at:535: ok -syntax error, unexpected '>' -./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror +stderr: +stdout: +./conflicts.at:540: $PREPARSER ./input +./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.default-reduction=consistent -o input.c input.y -239. conflicts.at:1264: ok -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error +./conflicts.at:1033: sed 's,.*/$,,' stderr 1>&2 +stderr: +./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror=other -Wno-other input.y +./conflicts.at:368: $PREPARSER ./input '0<0<0' +syntax error, unexpected end of file, expecting 'a' or 'b' +./conflicts.at:530: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +201. reduce.at:1627: 204. conflicts.at:28: ok +200. reduce.at:1627: ok + ok +179. reduce.at:312: ok + -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error stderr: -input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:4.6-8: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none -stderr: -stderr: -240. conflicts.at:1284: testing %expect right ... -stdout: 241. conflicts.at:1301: testing %expect too much ... -stdout: -242. conflicts.at:1321: testing %expect with reduce conflicts ... 
-./conflicts.at:509: $PREPARSER ./input -./conflicts.at:551: $PREPARSER ./input -./conflicts.at:1138: sed 's,.*/$,,' stderr 1>&2 +239. conflicts.at:1264: testing %expect not enough ... +./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +240. conflicts.at:1284: testing %expect right ... ./conflicts.at:1293: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +242. conflicts.at:1321: testing %expect with reduce conflicts ... ./conflicts.at:1330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -stderr: -stderr: stderr: -syntax error, unexpected end of file -./conflicts.at:509: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error -210. conflicts.at:509: ok -241. conflicts.at:1301: ok -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -242. conflicts.at:1321: ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +stdout: +./conflicts.at:513: $PREPARSER ./input +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=none -Werror --trace=none +241. conflicts.at:1301: 239. 
conflicts.at:1264: ok ok -./conflicts.at:372: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -218. conflicts.at:551: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error -stderr: - +242. conflicts.at:1321: ok +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none stderr: - -stdout: input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./output.at:782: sed -ne 's/#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/include/ast/loc.hh -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -./output.at:794: sed -ne 's/^#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/x1.hh -./conflicts.at:780: cat input.output -240. conflicts.at:1284: ok -243. conflicts.at:1341: testing %expect in grammar rule not enough ... -./conflicts.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -245. conflicts.at:1377: testing %expect in grammar rules ... -./conflicts.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -rall input.y -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none -./output.at:806: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x2.cc -M out/=bar/ x2.yy -stderr: -229. conflicts.at:764: ./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error + +240. conflicts.at:1284: 243. conflicts.at:1341: testing %expect in grammar rule not enough ... 
ok +./conflicts.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y 244. conflicts.at:1360: testing %expect in grammar rule right ... -stdout: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none -./conflicts.at:642: $PREPARSER ./input ./conflicts.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -syntax error, unexpected end of file, expecting 'a' -./conflicts.at:642: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -246. conflicts.at:1396: testing %expect in grammar rule too much ... -./conflicts.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none -stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none - -stdout: +245. conflicts.at:1377: testing %expect in grammar rules ... +./conflicts.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -rall input.y 243. conflicts.at:1341: ok -225. conflicts.at:642: 232. conflicts.at:1015: ok -./conflicts.at:632: $PREPARSER ./input - ok -stderr: -syntax error, unexpected 'b' -./conflicts.at:632: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -246. conflicts.at:1396: ok +246. conflicts.at:1396: testing %expect in grammar rule too much ... +./conflicts.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +246. conflicts.at:1396: ok +247. conflicts.at:1415: testing %expect-rr in grammar rule ... +./conflicts.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +244. conflicts.at:1360: ok +245. conflicts.at:1377: + ok -247. conflicts.at:1415: testing %expect-rr in grammar rule ... 248. conflicts.at:1440: testing %expect-rr too much in grammar rule ... -208. 
conflicts.at:275: ok -./conflicts.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -stderr: - -stdout: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none 249. conflicts.at:1469: testing %expect-rr not enough in grammar rule ... ./conflicts.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none -./conflicts.at:540: $PREPARSER ./input -250. conflicts.at:1498: testing %prec with user string ... -244. conflicts.at:1360: ok +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 stderr: -./conflicts.at:1507: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./output.at:806: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x2.o out/x2.cc -251. conflicts.at:1515: testing %no-default-prec without %prec ... -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y -syntax error, unexpected end of file, expecting 'a' or 'b' -./conflicts.at:540: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -223. conflicts.at:632: ok -248. conflicts.at:1440: stdout: - ok -./conflicts.at:647: $PREPARSER ./input - -249. conflicts.at:1469: ok stderr: -252. conflicts.at:1544: testing %no-default-prec with %prec ... -./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./conflicts.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stdout: -216. conflicts.at:540: ok -245. conflicts.at:1377: ok -syntax error, unexpected 'b' +syntax error, unexpected end of file, expecting 'a' or 'b' +syntax error, unexpected end of file, expecting 'b' +syntax error, unexpected end of file, expecting 'a' or 'b' +./conflicts.at:546: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:540: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:535: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +63. input.at:2883: ok +syntax error, unexpected end of file, expecting 'b' +./conflicts.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +210. conflicts.at:509: ok +249. conflicts.at:1469: ok +248. conflicts.at:1440: ok +250. conflicts.at:1498: testing %prec with user string ... 
+./conflicts.at:1507: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +247. conflicts.at:1415: ok + -./conflicts.at:647: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:622: $PREPARSER ./input -stderr: stderr: -syntax error, unexpected 'b' -./conflicts.at:622: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stdout: +177. reduce.at:120: ok +251. conflicts.at:1515: testing %no-default-prec without %prec ... stderr: stdout: - +176. reduce.at:70: ./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y + ok +./conflicts.at:622: $PREPARSER ./input +252. conflicts.at:1544: testing %no-default-prec with %prec ... +./conflicts.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 253. conflicts.at:1568: testing %default-prec ... -226. conflicts.at:647: ok -stdout: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none ./conflicts.at:1584: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:546: $PREPARSER ./input -247. conflicts.at:1415: ok -./conflicts.at:530: $PREPARSER ./input 254. conflicts.at:1592: testing Unreachable States After Conflict Resolution ... ./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y -stderr: +255. conflicts.at:1855: testing Solved conflicts report for multiple reductions in a state ... +./conflicts.at:1881: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y stderr: -./conflicts.at:1145: cat input.output -./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -syntax error, unexpected end of file, expecting 'a' or 'b' -syntax error, unexpected end of file, expecting 'b' -221. conflicts.at:622: ok -./conflicts.at:546: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:530: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] +stderr: +250. conflicts.at:1498: ok +stdout: +178. reduce.at:224: ok -255. conflicts.at:1855: testing Solved conflicts report for multiple reductions in a state ... -./conflicts.at:1881: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y -257. conflicts.at:2299: testing %expect-rr non GLR ... 256. conflicts.at:1935: testing %nonassoc error actions for multiple reductions in a state ... -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 1.y -217. conflicts.at:546: ok -238. conflicts.at:1127: ok -259. counterexample.at:43: testing Unifying S/R ... 
-./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -258. conflicts.at:2331: testing -W versus %expect and %expect-rr ... +257. conflicts.at:2299: testing %expect-rr non GLR ... ./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -214. conflicts.at:530: ok -250. conflicts.at:1498: ok +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 1.y -252. conflicts.at:1544: ok ./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror -stderr: -253. conflicts.at:1568: stdout: - ok -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret sr-rr.y - -261. counterexample.at:144: testing S/R Conflict with Nullable Symbols ... -./counterexample.at:157: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -260. counterexample.at:83: testing Deep Unifying S/R ... -./conflicts.at:638: $PREPARSER ./input -./counterexample.at:95: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none - -stderr: +253. conflicts.at:1568: ok +252. conflicts.at:1544: ok ./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Werror - - - -./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - -syntax error, unexpected end of file, expecting 'a' -./conflicts.at:638: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -263. counterexample.at:254: testing Non-unifying Unambiguous S/R ... -262. counterexample.at:207: testing Non-unifying Ambiguous S/R ... -./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +258. conflicts.at:2331: testing -W versus %expect and %expect-rr ... +259. counterexample.at:43: testing Unifying S/R ... 
+./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret sr-rr.y stderr: -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Werror input.y:7.5-7: warning: rule useless in parser due to conflicts [-Wother] input.y:11.5-7: warning: rule useless in parser due to conflicts [-Wother] input.y:17.11-26: warning: rule useless in parser due to conflicts [-Wother] input.y:18.11-26: warning: rule useless in parser due to conflicts [-Wother] input.y:19.11-26: warning: rule useless in parser due to conflicts [-Wother] ./conflicts.at:1882: cat input.output | sed -n '/^State 0$/,/^State 1$/p' + + +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Werror +260. counterexample.at:83: testing Deep Unifying S/R ... +./counterexample.at:95: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +261. counterexample.at:144: testing S/R Conflict with Nullable Symbols ... +./counterexample.at:157: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror stderr: -224. conflicts.at:638: ok input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] Example: A . B C @@ -6749,17 +6675,40 @@ `-> 1: a x `-> 3: A . `-> 6: B C input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] -268. counterexample.at:488: testing Cex Search Prepend ... ./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Werror stderr: -264. counterexample.at:298: testing S/R after first token ... -266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ... -./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: -input.y: error: 4 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:1.1-5: error: useless precedence and associativity for '+' [-Werror=precedence] -input.y:2.1-5: error: useless precedence and associativity for '*' [-Werror=precedence] +input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: A . B + Shift derivation + s + `-> 2: A xby + `-> 9: . 
B + Reduce derivation + s + `-> 1: ax by + `-> 3: A x `-> 6: B y + `-> 4: %empty . `-> 6: %empty +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + First example: A X . B Y $end + Shift derivation + $accept + `-> 0: s $end + `-> 2: A xby + `-> 10: X xby Y + `-> 9: . B + Second example: A X . B y $end + Reduce derivation + $accept + `-> 0: s $end + `-> 1: ax by + `-> 3: A x `-> 6: B y + `-> 5: X x + `-> 4: %empty . +input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] Example: A . B C @@ -6790,67 +6739,31 @@ `-> 7: A . `-> 10: B C input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -265. counterexample.at:363: testing Unifying R/R counterexample ... -./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:314: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -267. counterexample.at:441: testing Non-unifying R/R LR(2) conflict ... -./counterexample.at:451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:55: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -255. conflicts.at:1855: ok stderr: +stdout: +./conflicts.at:774: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 stderr: -./counterexample.at:95: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:12.5-20: error: rule useless in parser due to conflicts [-Werror=other] -input.y:20.5-20: error: rule useless in parser due to conflicts [-Werror=other] -input.y:21.4: error: rule useless in parser due to conflicts [-Werror=other] -input.y:25.14: error: rule useless in parser due to conflicts [-Werror=other] -input.y:25.16: error: rule useless in parser due to conflicts [-Werror=other] -input.y:31.5-7: error: rule useless in parser due to conflicts [-Werror=other] -input.y:32.4: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:1531: sed 's,.*/$,,' stderr 1>&2 +stderr: +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +syntax error, unexpected '<' +syntax error, unexpected end of file +./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +214. 
conflicts.at:530: ok -input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A . B - Shift derivation - s - `-> 2: A xby - `-> 9: . B - Reduce derivation - s - `-> 1: ax by - `-> 3: A x `-> 6: B y - `-> 4: %empty . `-> 6: %empty -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - First example: A X . B Y $end - Shift derivation - $accept - `-> 0: s $end - `-> 2: A xby - `-> 10: X xby Y - `-> 9: . B - Second example: A X . B y $end - Reduce derivation - $accept - `-> 0: s $end - `-> 1: ax by - `-> 3: A x `-> 6: B y - `-> 5: X x - `-> 4: %empty . -input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Werror -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error -./conflicts.at:1638: sed 's,.*/$,,' stderr 1>&2 -./counterexample.at:157: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: stderr: - +stderr: +stdout: +stdout: +stdout: +./conflicts.at:632: $PREPARSER ./input +./conflicts.at:638: $PREPARSER ./input +262. counterexample.at:207: testing Non-unifying Ambiguous S/R ... +./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:642: $PREPARSER ./input +stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] First example: B . C $end @@ -6868,6 +6781,70 @@ `-> 5: b cd `-> 7: B . `-> 8: C D input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +./output.at:835: $CXX $CPPFLAGS $CXXFLAGS -Iout/ $LDFLAGS -o parser out/x[12].o main.cc $LIBS +stderr: +syntax error, unexpected 'b' +./conflicts.at:622: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./counterexample.at:95: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:55: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:157: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +215. conflicts.at:535: ok +218. conflicts.at:551: 217. 
conflicts.at:546: ok + ok +255. conflicts.at:1855: ok +216. conflicts.at:540: ok + + + + + +stderr: +input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:4.6-8: error: rule useless in parser due to conflicts [-Werror=other] +stderr: +stderr: +stdout: +stderr: +1.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +./conflicts.at:647: $PREPARSER ./input +input.y: error: 4 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:1.1-5: error: useless precedence and associativity for '+' [-Werror=precedence] +input.y:2.1-5: error: useless precedence and associativity for '*' [-Werror=precedence] +stderr: +stderr: +263. counterexample.at:254: testing Non-unifying Unambiguous S/R ... +./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +265. counterexample.at:363: testing Unifying R/R counterexample ... +sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +264. counterexample.at:298: testing S/R after first token ... +./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:314: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stdout: +267. counterexample.at:441: testing Non-unifying R/R LR(2) conflict ... +./counterexample.at:451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ... +./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:626: $PREPARSER ./input +stderr: +stderr: +input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example: A b . + First reduce derivation + a + `-> 1: A b . + Second reduce derivation + a + `-> 1: A b + `-> 3: b . +input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] First example: A . A B $end @@ -6885,27 +6862,8 @@ `-> 1: t `-> 3: x `-> 3: x `-> 5: A `-> 5: A . 
-./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr ./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr stderr: -1.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=error -./counterexample.at:220: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -269. counterexample.at:550: testing R/R cex with prec ... -./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:2307: sed 's,.*/$,,' stderr 1>&2 -270. counterexample.at:610: testing Null nonterminals ... -./counterexample.at:621: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example A . B C - Shift derivation s -> [ y -> [ A . B ] c -> [ C ] ] - Reduce derivation s -> [ a -> [ A . ] x -> [ B C ] ] -input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -stderr: input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] First example: D . A $end @@ -6922,41 +6880,84 @@ `-> 6: D . input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=error stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example A . B C - Shift derivation s -> [ ac -> [ A ac -> [ b -> [ . B ] ] C ] ] - Reduce derivation s -> [ a -> [ A . ] bc -> [ B C ] ] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example A A . B B C C - Shift derivation s -> [ ac -> [ A ac -> [ A ac -> [ b -> [ . b -> [ B B ] ] ] C ] C ] ] - Reduce derivation s -> [ a -> [ A a -> [ A . ] ] bc -> [ B bc -> [ B C ] C ] ] -input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] -259. 
counterexample.at:43: ok -./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./counterexample.at:409: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: A b . - First reduce derivation +input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + Example: b . A X X Y + Shift derivation a - `-> 1: A b . - Second reduce derivation + `-> 2: s + `-> 7: b . xx y + `-> 9: A X X `-> 11: Y + Reduce derivation a - `-> 1: A b - `-> 3: b . -input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] -./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr + `-> 1: r t + `-> 3: b . `-> 6: A x xy + `-> 8: X `-> 10: X Y +input.y: warning: shift/reduce conflict on token X [-Wcounterexamples] + First example: A X . X + Shift derivation + a + `-> 1: t + `-> 5: A xx + `-> 9: X . X + Second example: X . X xy + Reduce derivation + a + `-> 1: x t + `-> 8: X . `-> 6: X xy +input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error +./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error stderr: stderr: +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y stderr: -sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +59. 
input.at:2719: syntax error, unexpected end of file, expecting 'a' +./conflicts.at:642: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected end of file, expecting 'a' + ok +syntax error, unexpected 'b' +207. conflicts.at:218: 206. conflicts.at:183: ok + ok +./conflicts.at:638: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:632: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./counterexample.at:220: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +211. conflicts.at:513: ok + + + + +268. counterexample.at:488: testing Cex Search Prepend ... +./counterexample.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +269. counterexample.at:550: testing R/R cex with prec ... +270. counterexample.at:610: testing Null nonterminals ... +./counterexample.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:621: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +271. counterexample.at:797: testing Non-unifying Prefix Share ... +./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] + Example: H i J . J J + Shift derivation + s + `-> 2: a J + `-> 3: H i J . J + Reduce derivation + s + `-> 1: a + `-> 3: H i J J + `-> 5: i J . +input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] Example: N A . B C @@ -6985,54 +6986,67 @@ `-> 5: N a B `-> 7: A . input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - Example: b . A X X Y - Shift derivation - a - `-> 2: s - `-> 7: b . xx y - `-> 9: A X X `-> 11: Y - Reduce derivation - a - `-> 1: r t - `-> 3: b . `-> 6: A x xy - `-> 8: X `-> 10: X Y -input.y: warning: shift/reduce conflict on token X [-Wcounterexamples] - First example: A X . X - Shift derivation - a - `-> 1: t - `-> 5: A xx - `-> 9: X . X - Second example: X . X xy - Reduce derivation - a - `-> 1: x t - `-> 8: X . `-> 6: X xy -input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:372: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y ./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -260. 
counterexample.at:83: ok stderr: -input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example A . B - Shift derivation s -> [ A xby -> [ . B ] ] - Reduce derivation s -> [ ax -> [ A x -> [ . ] ] by -> [ B y -> [ ] ] ] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - First example A X . B Y $end - Shift derivation $accept -> [ s -> [ A xby -> [ X xby -> [ . B ] Y ] ] $end ] - Second example A X . B y $end - Reduce derivation $accept -> [ s -> [ ax -> [ A x -> [ X x -> [ . ] ] ] by -> [ B y ] ] $end ] -input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] + Example: B . b c + First reduce derivation + S + `-> 1: B C + `-> 6: A b A `-> 7: A c A + `-> 3: B . `-> 6: %empty `-> 7: %empty `-> 7: %empty + Second reduce derivation + S + `-> 1: B C + `-> 7: A c A + `-> 3: B `-> 7: %empty + `-> 6: A b A + `-> 5: %empty . `-> 6: %empty +input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] + Example: C . c b + First reduce derivation + S + `-> 2: C B + `-> 7: A c A `-> 6: A b A + `-> 4: C . `-> 7: %empty `-> 6: %empty `-> 6: %empty + Second reduce derivation + S + `-> 2: C B + `-> 6: A b A + `-> 4: C `-> 6: %empty + `-> 7: A c A + `-> 5: %empty . `-> 7: %empty +./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr ./conflicts.at:2354: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1138: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:2307: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1531: sed 's,.*/$,,' stderr 1>&2 stderr: +stderr: +syntax error, unexpected 'b' +./conflicts.at:647: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected 'b' +./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./counterexample.at:314: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:368: $PREPARSER ./input '0>0' +./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:372: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:409: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +221. 
conflicts.at:622: ok +stderr: +input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:12.5-20: error: rule useless in parser due to conflicts [-Werror=other] +input.y:20.5-20: error: rule useless in parser due to conflicts [-Werror=other] +input.y:21.4: error: rule useless in parser due to conflicts [-Werror=other] +input.y:25.14: error: rule useless in parser due to conflicts [-Werror=other] +input.y:25.16: error: rule useless in parser due to conflicts [-Werror=other] +input.y:31.5-7: error: rule useless in parser due to conflicts [-Werror=other] +input.y:32.4: error: rule useless in parser due to conflicts [-Werror=other] +stderr: bison (GNU Bison) 3.8.2 init: 0.000000 # state items: 26 @@ -7311,13 +7325,131 @@ `-> 13: %empty . -./counterexample.at:314: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:499: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -261. counterexample.at:144: ok +272. counterexample.at:842: testing Deep Null Unifying ... +./counterexample.at:854: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] + Example: A a . D + Shift derivation + s + `-> 1: A a d + `-> 6: . D + Reduce derivation + s + `-> 2: A a a d + `-> 3: b `-> 6: D + `-> 4: c + `-> 5: %empty . 
+./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr stderr: +stdout: +./conflicts.at:518: $PREPARSER ./input stderr: stdout: +./conflicts.at:651: $PREPARSER ./input +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./counterexample.at:499: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:562: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:810: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +225. conflicts.at:642: ok +223. conflicts.at:632: 224. conflicts.at:638: ok + ok + + + +273. counterexample.at:884: testing Deep Null Non-unifying ... +./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +274. synclines.at:194: testing Prologue syncline ... +./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +275. synclines.at:214: testing %union syncline ... 
+./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none +./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c syncline.c +./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c syncline.c +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] + First example: A a . D $end + Shift derivation + $accept + `-> 0: s $end + `-> 1: A a d + `-> 6: . D + Second example: A a . D E $end + Reduce derivation + $accept + `-> 0: s $end + `-> 2: A a a d E + `-> 3: b `-> 6: D + `-> 4: c + `-> 5: %empty . +./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:1638: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1959: sed 's,.*/$,,' stderr 1>&2 +stderr: +stderr: +stderr: +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example A . B C + Shift derivation s -> [ ac -> [ A ac -> [ b -> [ . B ] ] C ] ] + Reduce derivation s -> [ a -> [ A . ] bc -> [ B C ] ] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example A A . B B C C + Shift derivation s -> [ ac -> [ A ac -> [ A ac -> [ b -> [ . b -> [ B B ] ] ] C ] C ] ] + Reduce derivation s -> [ a -> [ A a -> [ A . ] ] bc -> [ B bc -> [ B C ] C ] ] +input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] +input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example A . B + Shift derivation s -> [ A xby -> [ . B ] ] + Reduce derivation s -> [ ax -> [ A x -> [ . ] ] by -> [ B y -> [ ] ] ] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + First example A X . B Y $end + Shift derivation $accept -> [ s -> [ A xby -> [ X xby -> [ . B ] Y ] ] $end ] + Second example A X . B y $end + Reduce derivation $accept -> [ s -> [ ax -> [ A x -> [ X x -> [ . ] ] ] by -> [ B y ] ] $end ] +input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example A . B C + Shift derivation s -> [ y -> [ A . B ] c -> [ C ] ] + Reduce derivation s -> [ a -> [ A . 
] x -> [ B C ] ] +input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] +stderr: +./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected end of file +syntax error, unexpected end of file +./counterexample.at:854: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:651: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +226. conflicts.at:647: ok +222. conflicts.at:626: ok + + +276. synclines.at:237: testing %union name syncline ... +./synclines.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +277. synclines.at:264: testing Postprologue syncline ... +./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c +./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=error +./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] First example B . C $end @@ -7325,122 +7457,41 @@ Second example B . C D $end Reduce derivation $accept -> [ g -> [ x -> [ b -> [ B . 
] cd -> [ C D ] ] ] $end ] input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] -./conflicts.at:651: $PREPARSER ./input -./conflicts.at:1959: sed 's,.*/$,,' stderr 1>&2 - -stdout: +./counterexample.at:896: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=error +./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error ./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr stderr: -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=error -./conflicts.at:732: $PREPARSER ./input -syntax error, unexpected end of file -./conflicts.at:651: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +stderr: +input.y: warning: 2 reduce/reduce conflicts 
[-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] + First example D . A $end + First reduce derivation $accept -> [ s -> [ a -> [ D . ] A ] $end ] + Second example B D . A $end + Second reduce derivation $accept -> [ s -> [ B b -> [ D . ] A ] $end ] +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] First example A . A B $end Shift derivation $accept -> [ s -> [ t -> [ y -> [ A . A B ] ] ] $end ] Second example A . A $end Reduce derivation $accept -> [ s -> [ s -> [ t -> [ x -> [ A . ] ] ] t -> [ x -> [ A ] ] ] $end ] -syntax error, unexpected 'a', expecting 'b' -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error -./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:732: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Wnone,none -Werror --trace=none -271. counterexample.at:797: testing Non-unifying Prefix Share ... -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Wnone,none -Werror --trace=none -stderr: -262. counterexample.at:207: ok - -./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -272. counterexample.at:842: testing Deep Null Unifying ... -stderr: -stderr: -227. conflicts.at:651: ok -stderr: -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y -stdout: input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] Example A b . First reduce derivation a -> [ A b . ] Second reduce derivation a -> [ A b -> [ b . ] ] input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] -stderr: -./conflicts.at:372: $PREPARSER ./input '0<0' -input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] - Example: B . b c - First reduce derivation - S - `-> 1: B C - `-> 6: A b A `-> 7: A c A - `-> 3: B . `-> 6: %empty `-> 7: %empty `-> 7: %empty - Second reduce derivation - S - `-> 1: B C - `-> 7: A c A - `-> 3: B `-> 7: %empty - `-> 6: A b A - `-> 5: %empty . `-> 6: %empty -input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] - Example: C . c b - First reduce derivation - S - `-> 2: C B - `-> 7: A c A `-> 6: A b A - `-> 4: C . `-> 7: %empty `-> 6: %empty `-> 6: %empty - Second reduce derivation - S - `-> 2: C B - `-> 6: A b A - `-> 4: C `-> 6: %empty - `-> 7: A c A - `-> 5: %empty . 
`-> 7: %empty -./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./counterexample.at:854: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] - First example D . A $end - First reduce derivation $accept -> [ s -> [ a -> [ D . ] A ] $end ] - Second example B D . A $end - Second reduce derivation $accept -> [ s -> [ B b -> [ D . ] A ] $end ] -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example N A . B C - Shift derivation s -> [ n -> [ N b -> [ A . B C ] ] ] - Reduce derivation s -> [ n -> [ N a -> [ A . ] B ] C ] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example N N A . B D C - Shift derivation s -> [ n -> [ N n -> [ N b -> [ A . B D ] ] C ] ] - Reduce derivation s -> [ n -> [ N n -> [ N a -> [ A . ] B ] D ] C ] -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -263. counterexample.at:254: ok -stderr: -./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./conflicts.at:372: $PREPARSER ./input '0<0<0' -268. counterexample.at:488: ok -stderr: -./counterexample.at:562: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -syntax error, unexpected '<', expecting end of file -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -273. counterexample.at:884: testing Deep Null Non-unifying ... - -265. counterexample.at:363: ok -266. counterexample.at:399: - ok -./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Wnone,none -Werror --trace=none -stderr: - -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] Example b . A X X Y @@ -7453,103 +7504,43 @@ Reduce derivation a -> [ x -> [ X . 
] t -> [ X xy ] ] input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:372: $PREPARSER ./input '0>0' -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none -276. synclines.at:237: testing %union name syncline ... -./synclines.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -275. synclines.at:214: testing %union syncline ... +./conflicts.at:368: $PREPARSER ./input '0>0>0' +259. counterexample.at:43: 212. conflicts.at:518: ok +260. counterexample.at:83: ok + ok +227. conflicts.at:651: ok -./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -274. synclines.at:194: testing Prologue syncline ... -./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -264. counterexample.at:298: ok -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=none -Werror --trace=none -./conflicts.at:372: $PREPARSER ./input '0>0>0' -277. synclines.at:264: testing Postprologue syncline ... -./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] - Example: A a . D - Shift derivation - s - `-> 1: A a d - `-> 6: . D - Reduce derivation - s - `-> 2: A a a d - `-> 3: b `-> 6: D - `-> 4: c - `-> 5: %empty . -./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ -279. synclines.at:310: testing Epilogue syncline ... 
-./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -o input.c input.y -Werror -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] - Example: H i J . J J - Shift derivation - s - `-> 2: a J - `-> 3: H i J . J - Reduce derivation - s - `-> 1: a - `-> 3: H i J J - `-> 5: i J . -input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] + +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none 278. synclines.at:291: testing Action syncline ... +279. synclines.at:310: testing Epilogue syncline ... ./synclines.at:291: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./counterexample.at:854: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:372: $PREPARSER ./input '0<0>0' -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none +./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +280. synclines.at:327: testing %code top syncline ... +./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +281. synclines.at:346: testing %destructor syncline ... +./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Wnone,none -Werror --trace=none +./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c +./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c +./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c +./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c syncline.c stderr: +stdout: +./conflicts.at:558: $PREPARSER ./input stderr: stdout: -280. synclines.at:327: testing %code top syncline ... -./conflicts.at:1651: cat input.output -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] - First example: A a . D $end - Shift derivation - $accept - `-> 0: s $end - `-> 1: A a d - `-> 6: . D - Second example: A a . D E $end - Reduce derivation - $accept - `-> 0: s $end - `-> 2: A a a d E - `-> 3: b `-> 6: D - `-> 4: c - `-> 5: %empty . 
-./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:564: $PREPARSER ./input +./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error +./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=error +stderr: stderr: -./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c -syntax error, unexpected '>', expecting end of file -./counterexample.at:810: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -131. output.at:336: ok -./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c syncline.c -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./conflicts.at:1836: cat input.y >> input-keep.y -./conflicts.at:381: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-keep.y -./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] Example B . b c @@ -7559,29 +7550,28 @@ Example C . c b First reduce derivation S -> [ C -> [ A -> [ C . ] c A -> [ ] ] B -> [ A -> [ ] b A -> [ ] ] ] Second reduce derivation S -> [ C B -> [ A -> [ C -> [ A -> [ . ] c A -> [ ] ] ] b A -> [ ] ] ] -./counterexample.at:896: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 2.y -./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c syncline.c -./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c -./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c -251. conflicts.at:1515: ok -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -stderr: -stdout: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example N A . B C + Shift derivation s -> [ n -> [ N b -> [ A . B C ] ] ] + Reduce derivation s -> [ n -> [ N a -> [ A . 
] B ] C ] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example N N A . B D C + Shift derivation s -> [ n -> [ N n -> [ N b -> [ A . B D ] ] C ] ] + Reduce derivation s -> [ n -> [ N n -> [ N a -> [ A . ] B ] D ] C ] +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] + Example H i J . J J + Shift derivation s -> [ a -> [ H i J . J ] J ] + Reduce derivation s -> [ a -> [ H i -> [ i J . ] J J ] ] +input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] stderr: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c -./conflicts.at:626: $PREPARSER ./input -stderr: -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples - -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=none -Werror --trace=none -./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7608,15 +7598,23 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +262. counterexample.at:207: ok +261. counterexample.at:144: ok + + stderr: -syntax error, unexpected 'b' -269. counterexample.at:550: ok -stdout: -./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syncline.c:4: #error "4" -stderr: -stderr: -./synclines.at:254: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] + 4 | e: 'e' | %empty; + | ^~~~~~ +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Wnone,none -Werror --trace=none +282. synclines.at:370: testing %printer syncline ... +./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +283. synclines.at:440: testing syncline escapes: yacc.c ... 
+./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none +./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c syncline.c stderr: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" @@ -7649,12 +7647,43 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +stderr: +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] + First example A a . D $end + Shift derivation $accept -> [ s -> [ A a d -> [ . D ] ] $end ] + Second example A a . D E $end + Reduce derivation $accept -> [ s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] E ] $end ] +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] + Example A a . D + Shift derivation s -> [ A a d -> [ . D ] ] + Reduce derivation s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] ] +stderr: +syntax error, unexpected '>' +stderr: stderr: +syntax error, unexpected end of file, expecting 'b' +syntax error, unexpected end of file, expecting 'b' +stderr: +./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./conflicts.at:558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ +263. counterexample.at:254: ./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7681,12 +7710,132 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c input.c +syncline.c:4: #error "4" + ok +./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; + + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. + s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. 
+ s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF + +./synclines.at:214: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +265. counterexample.at:363: ok +266. counterexample.at:399: ok + + + +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none +284. synclines.at:440: testing syncline escapes: glr.c ... +./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 +285. synclines.at:440: testing syncline escapes: lalr1.cc ... +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 +286. synclines.at:440: testing syncline escapes: glr.cc ... +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none +stderr: +./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:288: sed 's,.*/$,,' stderr 1>&2 +stdout: +./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1296: $PREPARSER ./input +stdout: +stdout: +232. conflicts.at:1015: ok +syncline.c:4: #error "4" +syncline.c:4: #error "4" +./synclines.at:194: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:264: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +271. counterexample.at:797: 264. counterexample.at:298: ok + ok +268. counterexample.at:488: 269. counterexample.at:550: ok + ok +219. conflicts.at:558: ok +220. conflicts.at:564: ok + + + + + + + +stderr: +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none +stdout: +./reduce.at:1296: $PREPARSER ./input +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none +287. synclines.at:440: testing syncline escapes: glr2.cc ... 
+./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Wnone,none -Werror --trace=none +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none +290. synclines.at:497: testing %no-lines: lalr1.cc ... +288. synclines.at:497: testing %no-lines: yacc.c ... +289. synclines.at:497: testing %no-lines: glr.c ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y +291. synclines.at:497: testing %no-lines: glr.cc ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y +293. synclines.at:507: testing Output columns ... +./synclines.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +292. synclines.at:497: testing %no-lines: glr2.cc ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y +./synclines.at:497: mv input.cc without.cc +./synclines.at:497: mv input.c without.c +./synclines.at:497: mv input.c without.c +./synclines.at:541: sed -ne '/--BEGIN/,/--END/{' \ + -e '/input.c/s/ [0-9]* / LINE /;' \ + -e 'p;}' \ + input.c +./synclines.at:497: mv input.cc without.cc +./synclines.at:497: mv input.cc without.cc +stderr: +stdout: +./output.at:836: $PREPARSER ./parser +./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:731: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stderr: +syntax error +./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c input.c +syntax error +./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c +stdout: +./conflicts.at:368: $PREPARSER ./input '0<0>0' +273. counterexample.at:884: 272. counterexample.at:842: ok +syncline.c:4: #error "4" + ok +./synclines.at:254: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +stderr: +stderr: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2 stderr: ./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. 
@@ -7719,7 +7868,11 @@ syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ +./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7746,13 +7899,45 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; + + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. + s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF + + + +294. headers.at:56: testing Invalid CPP guards: --defines=input/input.h ... +295. headers.at:57: testing Invalid CPP guards: --defines=9foo.h ... stderr: -stderr: +./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./headers.at:56: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y +./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7779,18 +7964,17 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -222. conflicts.at:626: +stderr: +stdout: +stderr: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; -stdout: - ok -281. synclines.at:346: testing %destructor syncline ... # 1. Remove useless lines. # distcc clutter. @@ -7814,70 +7998,139 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -stdout: +./synclines.at:497: mv input.hh without.hh +./synclines.at:497: mv input.hh without.hh +./synclines.at:497: mv input.hh without.hh +./synclines.at:497: mv input.h without.h +./synclines.at:497: mv input.h without.h stderr: -./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c input.c +./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +293. 
synclines.at:507: ok +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./headers.at:57: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c +./headers.at:56: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y +stderr: +./synclines.at:497: grep '#line' *.cc *.hh +syntax error, unexpected '>' +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y +./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c input.c +stdout: +stdout: +stdout: +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 2.y +251. conflicts.at:1515: stdout: +296. headers.at:58: testing Invalid CPP guards: %glr-parser --defines=input/input.h ... + ok stdout: syncline.c:4: #error "4" -./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] - First example A a . D $end - Shift derivation $accept -> [ s -> [ A a d -> [ . D ] ] $end ] - Second example A a . D E $end - Reduce derivation $accept -> [ s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] E ] $end ] -stderr: syncline.c:4: #error "4" -./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] - Example A a . D - Shift derivation s -> [ A a d -> [ . D ] ] - Reduce derivation s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] ] -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] - Example H i J . J J - Shift derivation s -> [ a -> [ H i J . J ] J ] - Reduce derivation s -> [ a -> [ H i -> [ i J . 
] J J ] ] -input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr syncline.c:4: #error "4" -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y --warnings=error -./synclines.at:264: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:291: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:346: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +syncline.c:4: #error "4" ./synclines.at:327: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -stdout: -./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 syncline.c:4: #error "4" -./synclines.at:291: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:370: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./headers.at:58: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y + +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none +stderr: stdout: -./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c input.c -./conflicts.at:2239: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +297. headers.at:59: testing Invalid CPP guards: %glr-parser --defines=9foo.h ... +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=none -Werror --trace=none +./headers.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y +stderr: +stderr: stdout: -./conflicts.at:381: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./headers.at:58: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y +./conflicts.at:780: cat input.output +./synclines.at:497: grep '#line' *.c *.h +./synclines.at:497: grep '#line' *.cc *.hh +./synclines.at:497: grep '#line' *.c *.h +./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:497: grep '#line' *.cc *.hh ./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c input.c -282. synclines.at:370: testing %printer syncline ... -syncline.c:4: #error "4" -./synclines.at:194: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -271. 
counterexample.at:797: ok - -syncline.c:4: #error "4" ./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c input.c -./synclines.at:214: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -272. counterexample.at:842: ./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Werror - ok -273. counterexample.at:884: ./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - ok -283. synclines.at:440: testing syncline escapes: yacc.c ... -./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 -./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c input.c -./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c -256. conflicts.at:1935: ok +144. output.at:744: ok +195. reduce.at:1296: ok +194. reduce.at:1296: ok + + + +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=none -Werror --trace=none +299. headers.at:177: testing Sane headers: ... +300. headers.at:178: testing Sane headers: %locations %debug ... +./headers.at:177: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +298. headers.at:67: testing export YYLTYPE ... +./headers.at:178: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header -o input.c input.y +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Werror +stderr: +input.y:2:2: error: #error "2" + 2 | #error "2" + | ^~~~~ +stderr: +stdout: +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Werror +stderr: +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +./headers.at:59: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./synclines.at:194: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; + + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. 
+ s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF + +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +input.y:2:2: error: #error "2" + 2 | #error "2" + | ^~~~~ +./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y +./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +./conflicts.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.default-reduction=consistent -o input.c input.y +229. conflicts.at:764: ok stderr: ./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Werror -./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c syncline.c input.y:1:7: error: expected '{' before 'break' 1 | %union break | ^~~~~ @@ -8019,8 +8272,27 @@ input.c:1162:11: error: implicit declaration of function 'yydestruct' [-Wimplicit-function-declaration] 1162 | yydestruct ("Error: discarding", | ^~~~~~~~~~ + stderr: -./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +input.y:13:2: error: #error "13" + 13 | #error "13" + | ^~~~~ +stderr: +stderr: +2.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +2.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +2.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +2.y:3.12-14: error: rule useless in parser due to conflicts [-Werror=other] +input.y:11.1-18: error: deprecated directive: '%name-prefix "my_"', use '%define api.prefix {my_}' [-Werror=deprecated] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +301. headers.at:180: testing Sane headers: %glr-parser ... +./headers.at:180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. 
+ s/^[\d ]{6}\| //gm; + +./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8047,10 +8319,8 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -input.y:8:2: error: #error "8" - 8 | #error "8" - | ^~~~~ -./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./headers.at:85: sed 's,.*/$,,' stderr 1>&2 +./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8071,29 +8341,72 @@ # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. + s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; # Remove column. s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; # Map all combinations of "error: " and "#error: " to "#error ". s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -stderr: - -284. synclines.at:440: testing syncline escapes: glr.c ... + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF +stderr: +./conflicts.at:1651: cat input.output +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y +input.y: In function 'yyparse': +input.y:8:2: error: #error "8" + 8 | #error "8" + | ^~~~~ +stderr: +stdout: +stderr: +stdout: +input.y:2: #error "2" +stdout: +295. headers.at:57: ok +294. headers.at:56: ok +stderr: +input.y:8:2: error: #error "8" + 8 | #error "8" + | ^~~~~ stderr: -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Werror -./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 -input.y:13:2: error: #error "13" - 13 | #error "13" - | ^~~~~ +sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./synclines.at:497: mv input.c with.c +./synclines.at:497: mv input.cc with.cc +stderr: +302. headers.at:181: testing Sane headers: %locations %debug %glr-parser ... +./headers.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y input.y:2:2: error: #error "2" 2 | #error "2" | ^~~~~ -stderr: -./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +303. headers.at:183: testing Sane headers: api.pure ... +./headers.at:183: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +./synclines.at:497: mv input.cc with.cc +./synclines.at:194: cat stdout +./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. 
s/^[\d ]{6}\| //gm; @@ -8120,7 +8433,7 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8147,11 +8460,11 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -input.y: In function 'yyparse': -input.y:8:2: error: #error "8" - 8 | #error "8" - | ^~~~~ -stdout: +./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS +./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./conflicts.at:2317: sed 's,.*/$,,' stderr 1>&2 +./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS +./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2 ./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8179,6 +8492,15 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +./synclines.at:497: mv input.cc with.cc +./conflicts.at:2239: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +./conflicts.at:1145: cat input.output +./synclines.at:497: mv input.hh with.hh +./synclines.at:497: mv input.hh with.hh +./synclines.at:497: mv input.h with.h +stdout: +stdout: +stdout: input.y:1: #error expected '{' before 'break' %union break ^~~~~ @@ -8317,240 +8639,27 @@ input.c:1162: #error implicit declaration of function 'yydestruct' [-Wimplicit-function-declaration] yydestruct ("Error: discarding", ^~~~~~~~~~ -./synclines.at:255: grep '^input.y:1' stdout -stdout: -stdout: -stdout: -input.y:1: #error expected '{' before 'break' -input.y:1: #error expected '{' before 'break' input.y:13: #error "13" -input.y:8: #error "8" -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y -Wnone,none -Werror --trace=none -./synclines.at:264: cat stdout -276. synclines.at:237: ok -stderr: -stdout: -./synclines.at:310: cat stdout input.y:2: #error "2" +./conflicts.at:1836: cat input.y >> input-keep.y stderr: -./synclines.at:327: cat stdout -input.y:2:2: error: #error "2" - 2 | #error "2" - | ^~~~~ -stdout: +input.y: In function 'yy_symbol_value_print': input.y:2:2: error: #error "2" 2 | #error "2" | ^~~~~ -./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - -286. synclines.at:440: testing syncline escapes: glr.cc ... -287. synclines.at:440: testing syncline escapes: glr2.cc ... 
-./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -input.y:8: #error "8" -./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c syncline.c -./synclines.at:291: cat stdout -./synclines.at:194: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - -285. synclines.at:440: testing syncline escapes: lalr1.cc ... -./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -279. synclines.at:310: ok -stderr: -277. synclines.at:264: ok -stdout: -280. synclines.at:327: ok -stdout: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" - | ^~~~~ -input.y:2: #error "2" -input.y:2: #error "2" -./synclines.at:214: cat stdout -288. synclines.at:497: testing %no-lines: yacc.c ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y -./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - -./synclines.at:194: cat stdout -278. synclines.at:291: ok -stderr: - -275. synclines.at:214: ok -stderr: stderr: -sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input-keep.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input-keep.y: error: 2 reduce/reduce conflicts [-Werror=conflicts-rr] -input-keep.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input-keep.y:22.4: error: rule useless in parser due to conflicts [-Werror=other] -input-keep.y:26.16: error: rule useless in parser due to conflicts [-Werror=other] -input-keep.y:32.5-7: error: rule useless in parser due to conflicts [-Werror=other] -input-keep.y:33.4: error: rule useless in parser due to conflicts [-Werror=other] stderr: - -stdout: -274. 
synclines.at:194: ok -stdout: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" - | ^~~~~ -./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - -./conflicts.at:1096: $PREPARSER ./input -syncline.c:4: #error "4" -./conflicts.at:1838: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2 -./synclines.at:346: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 - stderr: -syntax error - - -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c input.c stdout: -stderr: -syncline.c:4: #error "4" -stderr: -290. synclines.at:497: testing %no-lines: lalr1.cc ... -./synclines.at:370: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error stdout: - -2.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -2.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -2.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -2.y:3.12-14: error: rule useless in parser due to conflicts [-Werror=other] -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -233. conflicts.at:1096: ./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c - ok -289. synclines.at:497: testing %no-lines: glr.c ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=error -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y -./synclines.at:497: mv input.c without.c -./synclines.at:497: mv input.h without.h -294. headers.at:56: testing Invalid CPP guards: --defines=input/input.h ... -293. synclines.at:507: testing Output columns ... 
-./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y --warnings=none -Werror --trace=none -./synclines.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:2317: sed 's,.*/$,,' stderr 1>&2 - -292. synclines.at:497: testing %no-lines: glr2.cc ... -./synclines.at:497: grep '#line' *.c *.h -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -./headers.at:56: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y -stderr: input.y: In function 'yydestruct': input.y:2:2: error: #error "2" 2 | #error "2" | ^~~~~ +./conflicts.at:732: $PREPARSER ./input +./conflicts.at:1096: $PREPARSER ./input +./synclines.at:497: mv input.c with.c +./synclines.at:255: grep '^input.y:1' stdout ./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8578,29 +8687,8 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -295. headers.at:57: testing Invalid CPP guards: --defines=9foo.h ... -291. synclines.at:497: testing %no-lines: glr.cc ... -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=error -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y -stdout: -input.y:2: #error "2" -./synclines.at:346: cat stdout -stderr: -./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS -stdout: -stderr: -296. headers.at:58: testing Invalid CPP guards: %glr-parser --defines=input/input.h ... -281. synclines.at:346: ok -stdout: -stderr: -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y -./conflicts.at:513: $PREPARSER ./input -input.y: In function 'yy_symbol_value_print': -input.y:2:2: error: #error "2" - 2 | #error "2" - | ^~~~~ -./synclines.at:497: mv input.cc without.cc +./synclines.at:264: cat stdout +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=error ./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. 
s/^[\d ]{6}\| //gm; @@ -8628,292 +8716,149 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +./synclines.at:214: cat stdout +./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +./conflicts.at:372: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stderr: +./synclines.at:497: mv input.hh with.hh +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-keep.y +./synclines.at:497: grep -v '#line' with.c >expout +./synclines.at:497: grep -v '#line' with.cc >expout +./synclines.at:497: mv input.h with.h +syntax error +syntax error, unexpected 'a', expecting 'b' stdout: -syntax error, unexpected end of file -./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./synclines.at:497: mv input.hh without.hh -stderr: -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none -./synclines.at:497: grep '#line' *.cc *.hh stdout: -./headers.at:58: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y -./synclines.at:497: mv input.c without.c -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y - -211. conflicts.at:513: ./headers.at:56: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c - ok stdout: -./synclines.at:497: mv input.c with.c -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y -./synclines.at:541: sed -ne '/--BEGIN/,/--END/{' \ - -e '/input.c/s/ [0-9]* / LINE /;' \ - -e 'p;}' \ - input.c input.y:2: #error "2" -./synclines.at:497: mv input.h without.h -./synclines.at:370: cat stdout -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none -./synclines.at:497: mv input.h with.h -./synclines.at:497: grep '#line' *.c *.h -293. synclines.at:507: ok -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none -./synclines.at:497: grep -v '#line' with.c >expout -282. synclines.at:370: ok -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y -./headers.at:57: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c -./synclines.at:497: cat without.c -297. headers.at:59: testing Invalid CPP guards: %glr-parser --defines=9foo.h ... 
-./synclines.at:497: grep -v '#line' with.h >expout -./headers.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y -./synclines.at:497: mv input.cc without.cc - - -./synclines.at:497: mv input.hh without.hh -./synclines.at:497: grep '#line' *.cc *.hh -./synclines.at:497: mv input.cc without.cc -./synclines.at:497: cat without.h -./conflicts.at:742: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -stderr: -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y -stdout: -./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS -./synclines.at:497: mv input.hh without.hh +input.y:8: #error "8" +274. synclines.at:194: ok +208. conflicts.at:275: input.y:8: #error "8" + ok +238. conflicts.at:1127: 256. conflicts.at:1935: ok + ok stderr: -132. output.at:337: ok -288. synclines.at:497: ok -./synclines.at:497: mv input.cc with.cc -stdout: -./synclines.at:497: grep '#line' *.cc *.hh -./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS -299. headers.at:177: testing Sane headers: ... -298. headers.at:67: testing export YYLTYPE ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y -./synclines.at:497: mv input.hh with.hh -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y -./headers.at:177: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header -o input.c input.y -./synclines.at:497: grep -v '#line' with.cc >expout -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=none -Werror --trace=none - -300. headers.at:178: testing Sane headers: %locations %debug ... -./synclines.at:497: cat without.cc -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none -./synclines.at:497: mv input.c with.c -./headers.at:178: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./headers.at:58: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c -stderr: -./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS stdout: -./synclines.at:497: grep -v '#line' with.hh >expout -./synclines.at:497: mv input.h with.h -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none -./synclines.at:497: cat without.hh -./headers.at:59: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c -301. headers.at:180: testing Sane headers: %glr-parser ... 
-135. output.at:340: ok -./synclines.at:497: grep -v '#line' with.c >expout -302. headers.at:181: testing Sane headers: %locations %debug %glr-parser ... -./headers.at:180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./synclines.at:497: cat without.c -./synclines.at:497: grep -v '#line' with.h >expout -290. synclines.at:497: ok -./headers.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y - -./synclines.at:497: cat without.h -./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS -./synclines.at:497: mv input.cc with.cc -./synclines.at:497: mv input.hh with.hh -./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -289. synclines.at:497: ./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Werror - ok -./synclines.at:497: grep -v '#line' with.cc >expout -./synclines.at:497: mv input.cc with.cc -257. conflicts.at:2299: ok -./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./synclines.at:497: cat without.cc -./synclines.at:497: mv input.hh with.hh -./synclines.at:497: grep -v '#line' with.cc >expout -./synclines.at:497: grep -v '#line' with.hh >expout -303. headers.at:183: testing Sane headers: api.pure ... -./synclines.at:497: cat without.cc -./headers.at:183: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./synclines.at:497: cat without.hh 304. headers.at:184: testing Sane headers: api.push-pull=both ... -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y ./headers.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./synclines.at:497: grep -v '#line' with.hh >expout -stderr: -stdout: -./synclines.at:497: cat without.hh -stderr: -stdout: -283. synclines.at:440: 292. synclines.at:497: ok - ok -136. output.at:341: ok -291. synclines.at:497: ok -254. conflicts.at:1592: ok -./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stderr: -./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stdout: 306. headers.at:187: testing Sane headers: c++ ... ./headers.at:187: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y 305. headers.at:185: testing Sane headers: api.pure api.push-pull=both ... ./headers.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -294. headers.at:56: ok - - - +307. headers.at:188: testing Sane headers: %locations %debug c++ ... 
+./headers.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y +./synclines.at:327: cat stdout +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error +./synclines.at:310: cat stdout +./conflicts.at:732: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./synclines.at:291: cat stdout +./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +stdout: +./synclines.at:497: grep -v '#line' with.c >expout +./synclines.at:497: cat without.cc +./synclines.at:497: cat without.c +./synclines.at:497: grep -v '#line' with.cc >expout +input.y:1: #error expected '{' before 'break' +input.y:1: #error expected '{' before 'break' +stdout: +./synclines.at:497: grep -v '#line' with.cc >expout +input.y:2: #error "2" +stdout: +275. synclines.at:214: 277. synclines.at:264: ok + ok +input.y:2: #error "2" +276. synclines.at:237: ok -stderr: -./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stdout: -stderr: -input.y:11.1-18: error: deprecated directive: '%name-prefix "my_"', use '%define api.prefix {my_}' [-Werror=deprecated] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -295. headers.at:57: ok -308. headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ... -./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -307. headers.at:188: testing Sane headers: %locations %debug c++ ... 309. headers.at:191: testing Sane headers: %locations c++ %glr-parser ... -311. actions.at:24: testing Midrule actions ... -./actions.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -./headers.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y +308. headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ... ./headers.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -312. actions.at:72: testing Typed midrule actions ... 
-./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc -./headers.at:85: sed 's,.*/$,,' stderr 1>&2 -./actions.at:109: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y - +./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Wnone,none -Werror --trace=none 310. headers.at:199: testing Several parsers ... -./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c ./headers.at:320: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x1.c x1.y +./synclines.at:370: cat stdout ./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror -stderr: -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=error -stdout: -./conflicts.at:381: $PREPARSER ./input '0<0' -stderr: -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:381: $PREPARSER ./input '0<0<0' -313. actions.at:122: testing Implicitly empty rule ... 
-stderr: -syntax error, unexpected '<', expecting end of file -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -./conflicts.at:381: $PREPARSER ./input '0>0' -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc -stderr: ./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:60: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:110: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:381: $PREPARSER ./input '0>0>0' -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Werror -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc ./headers.at:320: $CC $CFLAGS $CPPFLAGS -c -o x1.o x1.c -stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error -./conflicts.at:381: $PREPARSER ./input '0<0>0' -stderr: -1.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] - 11 | a: /* empty. */ {}; - | ^~ - | %empty -1.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -stderr: -stdout: -stderr: -./actions.at:133: sed 's,.*/$,,' stderr 1>&2 -syntax error, unexpected '>', expecting end of file -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none -./headers.at:102: $CC $CFLAGS $CPPFLAGS -c -o caller.o caller.c -130. 
output.at:335: ok -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=error -./conflicts.at:388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.c input.y +./synclines.at:346: cat stdout +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./synclines.at:497: cat without.cc +./synclines.at:497: cat without.c +./synclines.at:497: grep -v '#line' with.hh >expout +./synclines.at:497: grep -v '#line' with.h >expout +./synclines.at:497: cat without.cc +278. synclines.at:291: 280. synclines.at:327: ok + ok +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y +233. conflicts.at:1096: ok +279. synclines.at:310: ok +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Werror -stderr: -stdout: -stderr: -./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -stdout: + + + +311. actions.at:24: testing Midrule actions ... +./actions.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none +312. actions.at:72: testing Typed midrule actions ... +./actions.at:109: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y 314. actions.at:172: testing Invalid uses of %empty ... ./actions.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret one.y -./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none +313. actions.at:122: testing Implicitly empty rule ... 
+./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y ./actions.at:192: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -u one.y stderr: stdout: +./conflicts.at:1096: $PREPARSER ./input stderr: -./conflicts.at:388: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -305. headers.at:185: ok -stdout: -./headers.at:103: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stderr: -stdout: +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Werror +./synclines.at:497: grep -v '#line' with.hh >expout +syntax error +./synclines.at:497: grep -v '#line' with.hh >expout +281. synclines.at:346: ok +./synclines.at:497: grep -v '#line' with.h >expout ./actions.at:202: sed -e '1,8d' one.y -./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -./conflicts.at:743: $PREPARSER ./input -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Wnone,none -Werror --trace=none -stderr: -stderr: -syntax error, unexpected 'a', expecting 'b' or 'c' -./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stdout: -303. headers.at:183: 133. output.at:338: ok - ok -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y +282. synclines.at:370: ok + stderr: stdout: -./headers.at:320: echo "x1" >>expout -./actions.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret two.y -./headers.at:321: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x2.c x2.y -314. actions.at:172: ok +stderr: +input-keep.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input-keep.y: error: 2 reduce/reduce conflicts [-Werror=conflicts-rr] +input-keep.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input-keep.y:22.4: error: rule useless in parser due to conflicts [-Werror=other] +input-keep.y:26.16: error: rule useless in parser due to conflicts [-Werror=other] +input-keep.y:32.5-7: error: rule useless in parser due to conflicts [-Werror=other] +input-keep.y:33.4: error: rule useless in parser due to conflicts [-Werror=other] 315. actions.at:240: testing Valid uses of %empty ... +316. actions.at:270: testing Add missing %empty ... 
+./actions.at:285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update -Wall input.y ./actions.at:259: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -./actions.at:259: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./headers.at:321: $CC $CFLAGS $CPPFLAGS -c -o x2.o x2.c stderr: +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none stdout: -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror -./conflicts.at:1096: $PREPARSER ./input -stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -316. actions.at:270: testing Add missing %empty ... -stderr: -./actions.at:285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update -Wall input.y -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -234. conflicts.at:1096: ok -./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=none -Werror --trace=none +296. headers.at:58: ok stderr: input.y:3.4-5: warning: empty rule without %empty [-Wempty-rule] input.y:4.3-5.1: warning: empty rule without %empty [-Wempty-rule] @@ -8922,494 +8867,550 @@ input.y:9.3: warning: empty rule without %empty [-Wempty-rule] bison: file 'input.y' was updated (backup: 'input.y~') ./actions.at:286: cat input.y + +stderr: +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./actions.at:60: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./synclines.at:497: cat without.hh +./conflicts.at:1838: sed 's,.*/$,,' stderr 1>&2 +stdout: +./synclines.at:497: cat without.h +./actions.at:110: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +283. synclines.at:440: ./actions.at:300: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y + ok +stdout: +./actions.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret two.y +299. headers.at:177: ok 317. actions.at:365: testing Initial location: yacc.c ... 
./actions.at:365: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + stderr: stdout: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:300: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y - -134. output.at:339: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +297. headers.at:59: ok +stderr: +1.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] + 11 | a: /* empty. */ {}; + | ^~ + | %empty +1.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] 318. actions.at:366: testing Initial location: yacc.c api.pure=full ... ./actions.at:366: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + 319. actions.at:367: testing Initial location: yacc.c api.pure %parse-param { int x } ... -stderr: ./actions.at:367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:365: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stdout: - -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -296. headers.at:58: ok -316. actions.at:270: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file 320. actions.at:368: testing Initial location: yacc.c api.push-pull=both ... 
./actions.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -stderr: - -stdout: -stdout: -stderr: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:518: $PREPARSER ./input -stdout: -stderr: - -syntax error, unexpected end of file -./actions.at:111: $PREPARSER ./input -./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:365: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./synclines.at:497: cat without.hh +./actions.at:259: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./synclines.at:497: cat without.h +./synclines.at:497: cat without.hh +./actions.at:133: sed 's,.*/$,,' stderr 1>&2 ./actions.at:366: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=none -Werror --trace=none -212. conflicts.at:518: ./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -321. actions.at:369: testing Initial location: yacc.c api.push-pull=both api.pure=full ... -./actions.at:369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -312. actions.at:72: ok - -322. actions.at:370: testing Initial location: glr.c ... -./actions.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stdout: +292. synclines.at:497: ok +289. synclines.at:497: ok +234. conflicts.at:1096: ok +314. actions.at:172: ok +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y -Werror -323. 
actions.at:371: testing Initial location: glr.c api.pure ... -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret 2.y -./actions.at:369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + + + +316. actions.at:270: ok +321. actions.at:369: testing Initial location: yacc.c api.push-pull=both api.pure=full ... +322. actions.at:370: testing Initial location: glr.c ... +./actions.at:369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 324. actions.at:372: testing Initial location: lalr1.cc ... ./actions.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -304. headers.at:184: ok -stderr: -stdout: +323. actions.at:371: testing Initial location: glr.c api.pure ... +./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + stderr: stdout: -./headers.at:104: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o caller caller.o input.o $LIBS -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none -129. output.at:328: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file - -./actions.at:371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Werror -./actions.at:372: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - 325. actions.at:373: testing Initial location: glr.cc ... 
./actions.at:373: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -stderr: -stdout: -stderr: -stdout: -284. synclines.at:440: ok +./conflicts.at:372: $PREPARSER ./input '0<0' +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=error +./headers.at:102: $CC $CFLAGS $CPPFLAGS -c -o caller.o caller.c +./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +291. synclines.at:497: ./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y + ok +288. synclines.at:497: 290. synclines.at:497: ok + ok +257. conflicts.at:2299: ok + + + +327. actions.at:383: testing Initial location: yacc.c api.pure=full ... 326. actions.at:374: testing Initial location: glr2.cc ... ./actions.at:374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:383: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +328. actions.at:394: testing Initial location: yacc.c api.pure=full ... +./actions.at:394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -stdout: -299. headers.at:177: ok +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: stdout: -./output.at:835: $CXX $CPPFLAGS $CXXFLAGS -Iout/ $LDFLAGS -o parser out/x[12].o main.cc $LIBS -stderr: -2.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] - 11 | a: /* empty. */ {}; - | ^~ - | %empty -2.y:13.17-18: error: empty rule without %empty [-Werror=empty-rule] - 13 | c: /* empty. */ {}; - | ^~ - | %empty -2.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none +300. headers.at:178: ok +329. actions.at:478: testing Location print: yacc.c ... +./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -297. headers.at:59: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +330. 
actions.at:478: testing Location print: glr.c ... +./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=error +./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2 +./actions.at:369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:372: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:373: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:383: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:372: $PREPARSER ./input '0<0<0' stderr: stdout: -./headers.at:105: $PREPARSER ./caller -stdout: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -stderr: -./actions.at:149: sed 's,.*/$,,' stderr 1>&2 stderr: -./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - stdout: - -stderr: -./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./headers.at:320: echo "x1" >>expout +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none stderr: stdout: -stdout: -./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -298. headers.at:67: ok -137. output.at:342: ok -./actions.at:260: $PREPARSER ./input -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -327. actions.at:383: testing Initial location: yacc.c api.pure=full ... -stderr: -./actions.at:383: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=error -./actions.at:260: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -./actions.at:373: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -315. actions.at:240: ok stderr: stdout: - -329. actions.at:478: testing Location print: yacc.c ... -./conflicts.at:388: $PREPARSER ./input '0<0' - -328. actions.at:394: testing Initial location: yacc.c api.pure=full ... 
-./actions.at:394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +303. headers.at:183: ok stderr: +./headers.at:103: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stderr: +./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -300. headers.at:178: ok +./headers.at:321: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x2.c x2.y +syntax error, unexpected '<', expecting end of file +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror -./conflicts.at:753: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stdout: -./conflicts.at:388: $PREPARSER ./input '0<0<0' -330. actions.at:478: testing Location print: glr.c ... -stderr: -./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 331. actions.at:478: testing Location print: lalr1.cc ... -syntax error, unexpected '<', expecting end of file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -301. headers.at:180: ok stderr: stdout: -./actions.at:383: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:388: $PREPARSER ./input '0>0' -332. actions.at:478: testing Location print: glr.cc ... -./conflicts.at:564: $PREPARSER ./input stderr: -./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stdout: +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ +./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c + -o input.c input.y --warnings=error +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Wnone,none -Werror --trace=none +284. 
synclines.at:440: ok -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Wnone,none -Werror --trace=none -./conflicts.at:388: $PREPARSER ./input '0>0>0' -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +332. actions.at:478: testing Location print: glr.cc ... +sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2 +./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./conflicts.at:372: $PREPARSER ./input '0>0' stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +305. headers.at:185: ok +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=none -Werror --trace=none 333. actions.at:478: testing Location print: glr2.cc ... ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:388: $PREPARSER ./input '0<0>0' -220. conflicts.at:564: ok -stderr: +./headers.at:321: $CC $CFLAGS $CPPFLAGS -c -o x2.o x2.c ./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -syntax error, unexpected '>', expecting end of file -./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y -Wnone,none -Werror --trace=none +254. conflicts.at:1592: ok stderr: stderr: stdout: -334. actions.at:488: testing Exotic Dollars ... -./actions.at:532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:61: $PREPARSER ./input -209. 
conflicts.at:301: - ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file + stdout: +./actions.at:260: $PREPARSER ./input +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error +304. headers.at:184: ok stderr: -./headers.at:321: echo "x2" >>expout -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=none -Werror --trace=none -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x3.c x3.y -./actions.at:61: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:372: $PREPARSER ./input '0>0>0' +./actions.at:260: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret 2.y +stderr: +334. actions.at:488: testing Exotic Dollars ... +stdout: +./actions.at:532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -311. actions.at:24: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 335. actions.at:1047: testing Printers and Destructors ... ./actions.at:1047: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stdout: -336. actions.at:1048: testing Printers and Destructors with union ... 
-./actions.at:478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:558: $PREPARSER ./input -./actions.at:1048: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:367: $PREPARSER ./input +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y --warnings=none -Werror --trace=none stderr: +./actions.at:533: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: -./actions.at:367: $PREPARSER ./input -219. conflicts.at:558: ok -./headers.at:322: $CC $CFLAGS $CPPFLAGS -c -o x3.o x3.c +./headers.at:104: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o caller caller.o input.o $LIBS stderr: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 1.1 1.1: syntax error -./actions.at:533: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +315. actions.at:240: stderr: ./actions.at:367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -337. actions.at:1050: testing Printers and Destructors: %glr-parser ... -./actions.at:1050: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-empty-rule 2.y + ok +301. headers.at:180: ok +syntax error, unexpected '>', expecting end of file -319. actions.at:367: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -338. actions.at:1051: testing Printers and Destructors with union: %glr-parser ... -./actions.at:1051: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1047: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1048: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -339. actions.at:1053: testing Printers and Destructors: %header lalr1.cc ... 
-./actions.at:1053: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: stdout: ./actions.at:366: $PREPARSER ./input stderr: -1.1 -1.1: syntax error -./actions.at:366: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -313. actions.at:122: ok -318. actions.at:366: ok -./actions.at:1050: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file - - -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -340. actions.at:1054: testing Printers and Destructors with union: %header lalr1.cc ... -./actions.at:1054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:1053: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -341. actions.at:1056: testing Printers and Destructors: %header glr.cc ... -./actions.at:1056: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:1051: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:383: $PREPARSER ./input +stdout: +336. actions.at:1048: testing Printers and Destructors with union ... +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none +./actions.at:1048: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:61: $PREPARSER ./input +337. actions.at:1050: testing Printers and Destructors: %glr-parser ... 
+./actions.at:1050: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Werror stderr: stdout: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:365: $PREPARSER ./input stderr: -./conflicts.at:1096: $PREPARSER ./input stdout: ./actions.at:368: $PREPARSER ./input +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1047: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stderr: +stderr: +stderr: stderr: +./actions.at:1048: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +1.1 +1.1: syntax error + +: syntax error +./actions.at:365: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1 +1.1: syntax error 1.1 1.1: syntax error +./actions.at:366: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:383: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:61: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +319. actions.at:367: ok stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -320. actions.at:368: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -236. conflicts.at:1096: ok -./actions.at:1054: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file - +stdout: +./conflicts.at:1096: $PREPARSER ./input -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -342. actions.at:1057: testing Printers and Destructors with union: %header glr.cc ... -./actions.at:1057: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -343. actions.at:1059: testing Printers and Destructors: %header glr2.cc ... 
-./actions.at:1059: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:1056: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: +2.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] + 11 | a: /* empty. */ {}; + | ^~ + | %empty +2.y:13.17-18: error: empty rule without %empty [-Werror=empty-rule] + 13 | c: /* empty. */ {}; + | ^~ + | %empty +2.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +stderr: +stdout: +338. actions.at:1051: testing Printers and Destructors with union: %glr-parser ... stderr: +./actions.at:1051: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stdout: stderr: +./actions.at:394: $PREPARSER ./input +stderr: stdout: -./actions.at:383: $PREPARSER ./input -./actions.at:478: $PREPARSER ./input +./actions.at:111: $PREPARSER ./input stdout: -./actions.at:365: $PREPARSER ./input +./actions.at:369: $PREPARSER ./input +./headers.at:105: $PREPARSER ./caller +./actions.at:149: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1050: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:742: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: stderr: -1.1 -1.1: syntax error -./actions.at:365: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -: syntax error -./actions.at:383: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:394: $PREPARSER ./input -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -317. actions.at:365: ok stderr: 0 0: syntax error -329. actions.at:478: ok +1.1 +1.1: syntax error ./actions.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +311. actions.at:24: ./conflicts.at:372: $PREPARSER ./input '0<0>0' + ok +317. actions.at:365: ok +320. actions.at:368: ./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +327. actions.at:383: ok + + + + stderr: stdout: -./conflicts.at:1096: $PREPARSER ./input -327. 
actions.at:383: ok +./actions.at:478: $PREPARSER ./input +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none +339. actions.at:1053: testing Printers and Destructors: %header lalr1.cc ... +./actions.at:1053: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +340. actions.at:1054: testing Printers and Destructors with union: %header lalr1.cc ... +./actions.at:1054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +341. actions.at:1056: testing Printers and Destructors: %header glr.cc ... +./actions.at:1056: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +342. actions.at:1057: testing Printers and Destructors with union: %header glr.cc ... +./actions.at:1057: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:1051: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -syntax error -328. actions.at:394: ok ./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file - stderr: +syntax error, unexpected '>', expecting end of file +318. actions.at:366: ok +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +298. headers.at:67: ok +328. actions.at:394: ok +321. actions.at:369: ok + -stdout: -./actions.at:369: $PREPARSER ./input -stderr: -1.1 -1.1: syntax error -./actions.at:369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -235. conflicts.at:1096: ok -321. actions.at:369: ok 344. actions.at:1060: testing Printers and Destructors with union: %header glr2.cc ... -./actions.at:1060: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +343. actions.at:1059: testing Printers and Destructors: %header glr2.cc ... 345. actions.at:1071: testing Default tagless %printer and %destructor ... -./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +./actions.at:1060: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:1059: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +./conflicts.at:1096: $PREPARSER ./input 346. 
actions.at:1174: testing Default tagged and per-type %printer and %destructor ... ./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=error +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1057: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1054: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1056: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1053: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +312. actions.at:72: ok +syntax error +329. actions.at:478: ok +236. conflicts.at:1096: ok +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -347. actions.at:1307: testing Default %printer and %destructor for user-defined end token ... -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input0.c input0.y -./actions.at:1057: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror 348. actions.at:1429: testing Default %printer and %destructor are not for error or $undefined ... ./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror 349. actions.at:1532: testing Default %printer and %destructor are not for $accept ... ./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -stderr: -stdout: -./conflicts.at:754: $PREPARSER ./input +347. actions.at:1307: testing Default %printer and %destructor for user-defined end token ... 
+./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input0.c input0.y +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./conflicts.at:381: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y ./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Werror -stderr: -syntax error, unexpected 'a', expecting 'b' or 'c' -./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +stderr: +stdout: +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Wnone,none -Werror --trace=none +285. synclines.at:440: ok +./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./actions.at:1060: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -228. conflicts.at:676: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stderr: input.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] input.y:30.3-5: error: useless %printer for type <*> [-Werror=other] -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -stdout: -285. synclines.at:440: stderr: +235. 
conflicts.at:1096: ok - ok -input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] -input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other] -./actions.at:1116: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1416: sed 's,.*/$,,' stderr 1>&2 +stdout: stderr: -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file - input.y:22.3-4: error: useless %destructor for type <> [-Werror=other] input.y:22.3-4: error: useless %printer for type <> [-Werror=other] +stderr: + +stdout: +./headers.at:321: echo "x2" >>expout 350. actions.at:1596: testing Default %printer and %destructor for midrule values ... ./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other] -input.y:23.6-8: error: useless %printer for type <*> [-Werror=other] -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=error -./actions.at:1233: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1474: sed 's,.*/$,,' stderr 1>&2 +stderr: +stdout: stderr: input.y:24.3-4: error: useless %destructor for type <> [-Werror=other] input.y:24.3-4: error: useless %printer for type <> [-Werror=other] -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:370: $PREPARSER ./input +input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] +input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other] 351. actions.at:1743: testing @$ in %initial-action implies %locations ... 
./actions.at:1743: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./actions.at:1582: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./actions.at:1743: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=none -Werror --trace=none +stderr: +input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other] +input.y:23.6-8: error: useless %printer for type <*> [-Werror=other] ./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:1416: sed 's,.*/$,,' stderr 1>&2 +./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./actions.at:1743: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1474: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1116: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1233: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1582: sed 's,.*/$,,' stderr 1>&2 +stderr: +./headers.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x3.c x3.y +1.1 +1.1: syntax error +./actions.at:370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison 
--color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none stdout: -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none +./actions.at:371: $PREPARSER ./input +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:381: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +stderr: +1.1 +1.1: syntax error +./actions.at:371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] input.y:33.3-23: error: unset value: $$ [-Werror=other] input.y:32.3-23: error: unused value: $3 [-Werror=other] +322. actions.at:370: ok + stderr: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -stdout: -302. 
headers.at:181: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=error ./actions.at:1634: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file - -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +stdout: +./actions.at:534: $PREPARSER ./input ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input0 
input0.c $LIBS +stderr: 352. actions.at:1744: testing @$ in %destructor implies %locations ... +stdout: ./actions.at:1744: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1586: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +302. headers.at:181: ok + +353. actions.at:1745: testing @$ in %printer implies %locations ... +./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +286. synclines.at:440: ok +stderr: +stdout: +./conflicts.at:743: $PREPARSER ./input + +354. actions.at:1856: testing Qualified $$ in actions: yacc.c ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./headers.at:322: $CC $CFLAGS $CPPFLAGS -c -o x3.o x3.c +./actions.at:1744: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1745: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stderr: +syntax error, unexpected 'a', expecting 'b' or 'c' +323. actions.at:371: ok +./actions.at:534: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +355. actions.at:1856: testing Qualified $$ in actions: glr.c ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-empty-rule 2.y +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary 
--show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none stderr: stdout: +./actions.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: -./actions.at:371: $PREPARSER ./input -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +./actions.at:1048: $PREPARSER ./input '(x)' +stdout: +stderr: ./actions.at:1047: $PREPARSER ./input '(x)' +stdout: +./actions.at:478: $PREPARSER ./input +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none stderr: +stdout: +351. actions.at:1743: ok stderr: -./actions.at:1744: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +./conflicts.at:381: $PREPARSER ./input '0<0' + +stderr: +stdout: +356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ... 
+./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +stderr: +stderr: +stderr: +313. actions.at:122: stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. + ok +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9422,19 +9423,35 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1 -1.1: syntax error -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:534: $PREPARSER ./input -./actions.at:1120: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror + +357. actions.at:1856: testing Qualified $$ in actions: glr.cc ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:562: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1586: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +330. actions.at:478: ok +./actions.at:1048: $PREPARSER ./input '!' +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file + +stderr: +stdout: +352. actions.at:1744: ok +358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ... 
+./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y + stderr: ./actions.at:1478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1047: $PREPARSER ./input '!' -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1120: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input0 input0.c $LIBS +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples stderr: -./actions.at:534: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +359. actions.at:1863: testing Destroying lookahead assigned by semantic action ... +./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -9442,205 +9459,142 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -323. actions.at:371: ok -./actions.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1047: $PREPARSER ./input '!!!' -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:381: $PREPARSER ./input '0<0<0' +./actions.at:1047: $PREPARSER ./input '!' stderr: +stdout: +./actions.at:1856: $PREPARSER ./input --debug stderr: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. 
-./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./output.at:836: $PREPARSER ./parser - -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:478: $PREPARSER ./input +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./actions.at:1047: $PREPARSER ./input '(y)' +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:562: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) -sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +stderr: +syntax error, unexpected '<', expecting end of file +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +stderr: +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) Successful parse. -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -144. output.at:744: ok -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -353. actions.at:1745: testing @$ in %printer implies %locations ... -./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1047: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +353. 
actions.at:1745: ok -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: 'x' (2@20-29) -thing (2@20-29): 'x' (2@20-29) -sending: 'x' (3@30-39) -30.30-39.38: syntax error, unexpected 'x', expecting ')' -Freeing nterm thing (2@20-29) -Freeing nterm thing (1@10-19) -Freeing token 'x' (3@30-39) -sending: 'x' (4@40-49) -Freeing token 'x' (4@40-49) -sending: 'x' (5@50-59) -Freeing token 'x' (5@50-59) -sending: ')' (6@60-69) -line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) -sending: '(' (7@70-79) -sending: 'x' (8@80-89) -thing (8@80-89): 'x' (8@80-89) -sending: ')' (9@90-99) -line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) -sending: '(' (10@100-109) -sending: 'x' (11@110-119) -thing (11@110-119): 'x' (11@110-119) -sending: ')' (12@120-129) -line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) -sending: 'y' (13@130-139) -input (0@129-129): /* Nothing */ -input (2@100-129): line (10@100-129) input (0@129-129) -input (2@70-129): line (7@70-99) input (2@100-129) -input (2@0-129): line (-1@0-69) input (2@70-129) -130.130-139.138: syntax error, unexpected 'y', expecting END -Freeing nterm input (2@0-129) -Freeing token 'y' (13@130-139) -Parsing FAILED. -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -./actions.at:1047: $PREPARSER ./input '(x)(x)x' +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: 'x' (6@60-69) -thing (6@60-69): 'x' (6@60-69) -sending: END (7@70-79) -70.70-79.78: syntax error, unexpected END, expecting 'x' -Freeing nterm thing (6@60-69) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Freeing token END (7@70-79) -Parsing FAILED. -354. actions.at:1856: testing Qualified $$ in actions: yacc.c ... 
-./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1745: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +332. actions.at:478: ok +./actions.at:1048: $PREPARSER ./input '!!!' +360. actions.at:1918: testing YYBACKUP ... +./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file + stderr: stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +361. types.at:25: testing %union vs. api.value.type ... 
+./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: stdout: -./actions.at:1047: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:372: $PREPARSER ./input stderr: -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: '(' (6@60-69) -sending: 'x' (7@70-79) -thing (7@70-79): 'x' (7@70-79) -sending: ')' (8@80-89) -line (6@60-89): '(' (6@60-69) thing (7@70-79) ')' (8@80-89) -sending: '(' (9@90-99) -sending: 'x' (10@100-109) -thing (10@100-109): 'x' (10@100-109) -sending: ')' (11@110-119) -line (9@90-119): '(' (9@90-99) thing (10@100-109) ')' (11@110-119) -sending: '(' (12@120-129) -sending: 'x' (13@130-139) -thing (13@130-139): 'x' (13@130-139) -sending: ')' (14@140-149) -line (12@120-149): '(' (12@120-129) thing (13@130-139) ')' (14@140-149) -sending: '(' (15@150-159) -sending: 'x' (16@160-169) -thing (16@160-169): 'x' (16@160-169) -sending: ')' (17@170-179) -line (15@150-179): '(' (15@150-159) thing (16@160-169) ')' (17@170-179) -sending: '(' (18@180-189) -sending: 'x' (19@190-199) -thing (19@190-199): 'x' (19@190-199) -sending: ')' (20@200-209) -200.200-209.208: memory exhausted -Freeing nterm thing (19@190-199) -Freeing nterm line (15@150-179) -Freeing nterm line (12@120-149) -Freeing nterm line (9@90-119) -Freeing nterm line (6@60-89) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Parsing FAILED (status 2). -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +361. types.at:25: stdout: + ok +./actions.at:373: $PREPARSER ./input + +362. types.at:44: testing %yacc vs. api.value.type=union ... +./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./actions.at:1856: sed -ne '/ival:/p' stderr +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error stderr: +./actions.at:1954: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: -335. actions.at:1047: ok -351. 
actions.at:1743: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:1051: $PREPARSER ./input '(x)' stderr: -input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] - 24 | %printer { #error "<*> printer should not be used" } <*> - | ^~~ -input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] - 24 | %printer { #error "<*> printer should not be used" } <*> - | ^~~ -input.y:33.3-23: error: unset value: $$ [-Werror=other] - 33 | { @$ = 4; } // Only used. - | ^~~~~~~~~~~~~~~~~~~~~ -input.y:32.3-23: error: unused value: $3 [-Werror=other] - 32 | { USE ($$); @$ = 3; } // Only set. - | ^~~~~~~~~~~~~~~~~~~~~ -./actions.at:1641: sed 's,.*/$,,' stderr 1>&2 - -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +1.1 +1.1: syntax error +./actions.at:1047: $PREPARSER ./input '!!!' +./actions.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:381: $PREPARSER ./input '0>0' +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +362. types.at:44: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -355. actions.at:1856: testing Qualified $$ in actions: glr.c ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./actions.at:1048: $PREPARSER ./input '(x)' +./actions.at:1050: $PREPARSER ./input '(x)' +363. types.at:139: testing yacc.c api.value.type={double} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: +stdout: +./headers.at:322: echo "x3" >>expout +stderr: +stderr: stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9653,27 +9607,19 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./actions.at:563: $PREPARSER ./input -stderr: -./actions.at:1048: $PREPARSER ./input '!' -stderr: sending: '!' 
(0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) +sending: END (3@30-39) +Freeing token END (3@30-39) +Freeing nterm input (5@0-29) Successful parse. -./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -334. actions.at:488: ok -./actions.at:1048: $PREPARSER ./input '!!!' -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x4.c x4.y +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -9683,169 +9629,95 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1048: $PREPARSER ./input '(y)' - -stderr: -stderr: -stdout: +1.1 +1.1: syntax error +./actions.at:373: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) sending: END (3@30-39) input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) +input (2@0-29): line (0@0-29) input (0@29-29) Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. +354. actions.at:1856: ok +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +stdout: +./actions.at:563: $PREPARSER ./input +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none +364. types.at:139: testing yacc.c api.value.type={double} %header ... 
+./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:370: $PREPARSER ./input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -1.1 -1.1: syntax error -./actions.at:370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1048: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: 'x' (2@20-29) -thing (2@20-29): 'x' (2@20-29) -sending: 'x' (3@30-39) -30.30-39.38: syntax error, unexpected 'x', expecting ')' -Freeing nterm thing (2@20-29) -Freeing nterm thing (1@10-19) -Freeing token 'x' (3@30-39) -sending: 'x' (4@40-49) -Freeing token 'x' (4@40-49) -sending: 'x' (5@50-59) -Freeing token 'x' (5@50-59) -sending: ')' (6@60-69) -line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) -sending: '(' (7@70-79) -sending: 'x' (8@80-89) -thing (8@80-89): 'x' (8@80-89) -sending: ')' (9@90-99) -line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) -sending: '(' (10@100-109) -sending: 'x' (11@110-119) -thing (11@110-119): 'x' (11@110-119) -sending: ')' (12@120-129) -line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) -sending: 'y' (13@130-139) -input (0@129-129): /* Nothing */ -input (2@100-129): line (10@100-129) input (0@129-129) -input (2@70-129): line (7@70-99) input (2@100-129) -input (2@0-129): line (-1@0-69) input (2@70-129) -130.130-139.138: syntax error, unexpected 'y', expecting END -Freeing nterm input (2@0-129) -Freeing token 'y' (13@130-139) -Parsing FAILED. -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -322. actions.at:370: ok -./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -357. actions.at:1856: testing Qualified $$ in actions: glr.cc ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] + 24 | %printer { #error "<*> printer should not be used" } <*> + | ^~~ +input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] + 24 | %printer { #error "<*> printer should not be used" } <*> + | ^~~ +input.y:33.3-23: error: unset value: $$ [-Werror=other] + 33 | { @$ = 4; } // Only used. + | ^~~~~~~~~~~~~~~~~~~~~ +input.y:32.3-23: error: unused value: $3 [-Werror=other] + 32 | { USE ($$); @$ = 3; } // Only set. + | ^~~~~~~~~~~~~~~~~~~~~ +./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1050: $PREPARSER ./input '!' +./actions.at:1051: $PREPARSER ./input '!' +325. actions.at:373: ok +324. actions.at:372: ok stderr: stdout: -349. 
actions.at:1532: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file - +./actions.at:478: $PREPARSER ./input -./actions.at:1048: $PREPARSER ./input '(x)(x)x' -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: 'x' (6@60-69) -thing (6@60-69): 'x' (6@60-69) -sending: END (7@70-79) -70.70-79.78: syntax error, unexpected END, expecting 'x' -Freeing nterm thing (6@60-69) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Freeing token END (7@70-79) -Parsing FAILED. stdout: -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -352. actions.at:1744: ok -358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:1048: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' + +349. actions.at:1532: ok stderr: -359. actions.at:1863: testing Destroying lookahead assigned by semantic action ... 
-sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: '(' (6@60-69) -sending: 'x' (7@70-79) -thing (7@70-79): 'x' (7@70-79) -sending: ')' (8@80-89) -line (6@60-89): '(' (6@60-69) thing (7@70-79) ')' (8@80-89) -sending: '(' (9@90-99) -sending: 'x' (10@100-109) -thing (10@100-109): 'x' (10@100-109) -sending: ')' (11@110-119) -line (9@90-119): '(' (9@90-99) thing (10@100-109) ')' (11@110-119) -sending: '(' (12@120-129) -sending: 'x' (13@130-139) -thing (13@130-139): 'x' (13@130-139) -sending: ')' (14@140-149) -line (12@120-149): '(' (12@120-129) thing (13@130-139) ')' (14@140-149) -sending: '(' (15@150-159) -sending: 'x' (16@160-169) -thing (16@160-169): 'x' (16@160-169) -sending: ')' (17@170-179) -line (15@150-179): '(' (15@150-159) thing (16@160-169) ')' (17@170-179) -sending: '(' (18@180-189) -sending: 'x' (19@190-199) -thing (19@190-199): 'x' (19@190-199) -sending: ')' (20@200-209) -200.200-209.208: memory exhausted -Freeing nterm thing (19@190-199) -Freeing nterm line (15@150-179) -Freeing nterm line (12@120-149) -Freeing nterm line (9@90-119) -Freeing nterm line (6@60-89) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Parsing FAILED (status 2). -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./actions.at:1907: $PREPARSER ./input +365. types.at:139: testing yacc.c api.value.type={variant} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -336. actions.at:1048: ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file - ok +366. types.at:139: testing yacc.c api.value.type={variant} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -360. actions.at:1918: testing YYBACKUP ... stdout: -./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./actions.at:1479: $PREPARSER ./input --debug - +367. types.at:139: testing yacc.c api.value.type={struct foo} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:1641: sed 's,.*/$,,' stderr 1>&2 +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +stderr: stderr: +stderr: +./conflicts.at:381: $PREPARSER ./input '0>0>0' +sending: '!' 
(0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. +'b' destructor +'a' destructor +./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -9888,86 +9760,208 @@ Cleanup: popping token error () Cleanup: popping token 'a' ('a') DESTROY 'a' -./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. +stderr: +334. actions.at:488: ok +./actions.at:1047: $PREPARSER ./input '(y)' +./actions.at:1048: $PREPARSER ./input '(y)' +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:1656: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -348. actions.at:1429: ok + +368. types.at:139: testing yacc.c api.value.type={struct foo} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none +stderr: +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: -361. types.at:25: testing %union vs. api.value.type ... -./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./actions.at:1121: $PREPARSER ./input --debug stderr: +sending: '(' (0@0-9) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) +sending: ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (-1@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. 
+syntax error, unexpected '>', expecting end of file +sending: '(' (0@0-9) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) +sending: ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (-1@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. +331. actions.at:478: ok +stderr: +stdout: + +stderr: +369. types.at:139: testing yacc.c api.value.type={struct bar} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stdout: +./actions.at:1416: $PREPARSER ./input0 --debug +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:323: $CC $CFLAGS $CPPFLAGS -c -o x4.o x4.c +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: Starting parse Entering state 0 Stack now 0 -Reading a token -Next token is token 'a' (1.1: <> printer for 'a' @ 1) -Shifting token 'a' (1.1: <> printer for 'a' @ 1) +Reducing stack by rule 1 (line 49): +-> $$ = nterm start (1.1: <> for 'S' @ 1) Entering state 1 Stack now 0 1 Reading a token -Next token is token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) -Shifting token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) -Entering state 3 -Stack now 0 1 3 -Reading a token -Next token is token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) -Shifting token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) -Entering state 5 -Stack now 0 1 3 5 -Reading a token -Next token is token 'd' (1.4: <> printer for 'd' @ 4) -Shifting token 'd' (1.4: <> printer for 'd' @ 4) -Entering state 6 -Stack now 0 1 3 5 6 -Reading a token Now at end of input. -1.5: syntax error, unexpected end of file, expecting 'e' -Error: popping token 'd' (1.4: <> printer for 'd' @ 4) -Stack now 0 1 3 5 -Error: popping token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) -Stack now 0 1 3 -Error: popping token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) -Stack now 0 1 -Error: popping token 'a' (1.1: <> printer for 'a' @ 1) -Stack now 0 -Cleanup: discarding lookahead token "end of file" (1.5: ) -Stack now 0 -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -286. synclines.at:440: ok +Shifting token END (1.1: <> for 'E' @ 1) +Entering state 2 +Stack now 0 1 2 +Stack now 0 1 2 +Cleanup: popping token END (1.1: <> for 'E' @ 1) +Cleanup: popping nterm start (1.1: <> for 'S' @ 1) +./actions.at:1051: $PREPARSER ./input '!!!' +./actions.at:1050: $PREPARSER ./input '!!!' +348. actions.at:1429: ok +359. 
actions.at:1863: ok -./actions.at:1954: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1121: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -345. actions.at:1071: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +371. types.at:139: testing yacc.c api.value.type={union foo} ... +370. types.at:139: testing yacc.c api.value.type={struct bar} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./conflicts.at:753: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -stdout: -./actions.at:478: $PREPARSER ./input stderr: -362. types.at:44: testing %yacc vs. api.value.type=union ... -./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -363. types.at:139: testing yacc.c api.value.type={double} ... -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -361. types.at:25: ok -330. actions.at:478: ok +sending: '!' (0@0-9) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) +sending: END (3@30-39) +Freeing token END (3@30-39) +Freeing nterm input (5@0-29) +Successful parse. +./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +sending: '!' (0@0-9) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) +sending: END (3@30-39) +Freeing token END (3@30-39) +Freeing nterm input (5@0-29) +Successful parse. 
+./actions.at:1048: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1047: $PREPARSER ./input '(xxxxx)(x)(x)y' +./conflicts.at:381: $PREPARSER ./input '0<0>0' stderr: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stdout: -stderr: ./actions.at:1238: $PREPARSER ./input --debug -stdout: +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: 'x' (2@20-29) +thing (2@20-29): 'x' (2@20-29) +sending: 'x' (3@30-39) +30.30-39.38: syntax error, unexpected 'x', expecting ')' +Freeing nterm thing (2@20-29) +Freeing nterm thing (1@10-19) +Freeing token 'x' (3@30-39) +sending: 'x' (4@40-49) +Freeing token 'x' (4@40-49) +sending: 'x' (5@50-59) +Freeing token 'x' (5@50-59) +sending: ')' (6@60-69) +line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) +sending: '(' (7@70-79) +sending: 'x' (8@80-89) +thing (8@80-89): 'x' (8@80-89) +sending: ')' (9@90-99) +line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) +sending: '(' (10@100-109) +sending: 'x' (11@110-119) +thing (11@110-119): 'x' (11@110-119) +sending: ')' (12@120-129) +line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) +sending: 'y' (13@130-139) +input (0@129-129): /* Nothing */ +input (2@100-129): line (10@100-129) input (0@129-129) +input (2@70-129): line (7@70-99) input (2@100-129) +input (2@0-129): line (-1@0-69) input (2@70-129) +130.130-139.138: syntax error, unexpected 'y', expecting END +Freeing nterm input (2@0-129) +Freeing token 'y' (13@130-139) +Parsing FAILED. +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: 'x' (2@20-29) +thing (2@20-29): 'x' (2@20-29) +sending: 'x' (3@30-39) +30.30-39.38: syntax error, unexpected 'x', expecting ')' +Freeing nterm thing (2@20-29) +Freeing nterm thing (1@10-19) +Freeing token 'x' (3@30-39) +sending: 'x' (4@40-49) +Freeing token 'x' (4@40-49) +sending: 'x' (5@50-59) +Freeing token 'x' (5@50-59) +sending: ')' (6@60-69) +line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) +sending: '(' (7@70-79) +sending: 'x' (8@80-89) +thing (8@80-89): 'x' (8@80-89) +sending: ')' (9@90-99) +line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) +sending: '(' (10@100-109) +sending: 'x' (11@110-119) +thing (11@110-119): 'x' (11@110-119) +sending: ')' (12@120-129) +line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) +sending: 'y' (13@130-139) +input (0@129-129): /* Nothing */ +input (2@100-129): line (10@100-129) input (0@129-129) +input (2@70-129): line (7@70-99) input (2@100-129) +input (2@0-129): line (-1@0-69) input (2@70-129) +130.130-139.138: syntax error, unexpected 'y', expecting END +Freeing nterm input (2@0-129) +Freeing token 'y' (13@130-139) +Parsing FAILED. 
Starting parse Entering state 0 Stack now 0 @@ -10018,307 +10012,337 @@ Stack now 0 Cleanup: discarding lookahead token "end of file" () Stack now 0 -./headers.at:322: echo "x3" >>expout - -./actions.at:1238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -364. types.at:139: testing yacc.c api.value.type={double} %header ... -./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x4.c x4.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -346. actions.at:1174: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -365. types.at:139: testing yacc.c api.value.type={variant} ... -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -366. types.at:139: testing yacc.c api.value.type={variant} %header ... -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -367. types.at:139: testing yacc.c api.value.type={struct foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -362. types.at:44: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - +stdout: +./conflicts.at:1096: $PREPARSER ./input ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input1.c input1.y stderr: stdout: -./headers.at:323: $CC $CFLAGS $CPPFLAGS -c -o x4.o x4.c -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -353. actions.at:1745: ok -368. types.at:139: testing yacc.c api.value.type={struct foo} %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -258. conflicts.at:2331: stdout: - ok +stdout: ./actions.at:1856: $PREPARSER ./input --debug stderr: +stdout: +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1121: $PREPARSER ./input --debug +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +stderr: +stderr: +./actions.at:1238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token 'a' (1.1: <> printer for 'a' @ 1) +Shifting token 'a' (1.1: <> printer for 'a' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Shifting token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Entering state 3 +Stack now 0 1 3 +Reading a token +Next token is token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) +Shifting token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) +Entering state 5 +Stack now 0 1 3 5 +Reading a token +Next token is token 'd' (1.4: <> printer for 'd' @ 4) +Shifting token 'd' (1.4: <> printer for 'd' @ 4) +Entering state 6 +Stack now 0 1 3 5 6 +Reading a token +Now at end of input. +1.5: syntax error, unexpected end of file, expecting 'e' +Error: popping token 'd' (1.4: <> printer for 'd' @ 4) +Stack now 0 1 3 5 +Error: popping token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) +Stack now 0 1 3 +Error: popping token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Stack now 0 1 +Error: popping token 'a' (1.1: <> printer for 'a' @ 1) +Stack now 0 +Cleanup: discarding lookahead token "end of file" (1.5: ) +Stack now 0 +Starting parse +Entering state 0 +Reading a token Next token is token UNTYPED (ival: 10, fval: 0.1) Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 -Stack now 0 1 Reading a token Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 53): +Reducing stack 0 by rule 1 (line 53): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) Entering state 2 -Stack now 0 2 Reading a token Now at end of input. Shifting token "end of file" () Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) -stderr: +syntax error, unexpected '>', expecting end of file ./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./actions.at:1050: $PREPARSER ./input '(y)' +./actions.at:1051: $PREPARSER ./input '(y)' stdout: +306. headers.at:187: ok - -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -370. types.at:139: testing yacc.c api.value.type={struct bar} %header ... -369. types.at:139: testing yacc.c api.value.type={struct bar} ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: +stderr: +stdout: stdout: +308. headers.at:189: ./types.at:139: $PREPARSER ./test + ok +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Werror +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +372. types.at:139: testing yacc.c api.value.type={union foo} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: +stderr: +./actions.at:1121: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -./actions.at:1416: $PREPARSER ./input0 --debug Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token UNTYPED (ival: 10, fval: 0.1) Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 -Stack now 0 1 Reading a token Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 53): +Reducing stack 0 by rule 1 (line 53): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) Entering state 2 -Stack now 0 2 Reading a token Now at end of input. Shifting token "end of file" () Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 49): --> $$ = nterm start (1.1: <> for 'S' @ 1) -Entering state 1 -Stack now 0 1 -Reading a token -Now at end of input. -Shifting token END (1.1: <> for 'E' @ 1) -Entering state 2 -Stack now 0 1 2 -Stack now 0 1 2 -Cleanup: popping token END (1.1: <> for 'E' @ 1) -Cleanup: popping nterm start (1.1: <> for 'S' @ 1) -stdout: -./actions.at:1856: sed -ne '/ival:/p' stderr -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./actions.at:1657: $PREPARSER ./input --debug -354. 
actions.at:1856: ok -stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 30): --> $$ = nterm $@1 (: ) -Entering state 2 -Stack now 0 2 -Reducing stack by rule 2 (line 31): --> $$ = nterm @2 (: 2) -Entering state 4 -Stack now 0 2 4 -Reducing stack by rule 3 (line 32): --> $$ = nterm @3 (: 3) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 4 (line 33): --> $$ = nterm @4 (: 4) -Entering state 6 -Stack now 0 2 4 5 6 -Reading a token -Now at end of input. -syntax error -Error: popping nterm @4 (: 4) -DESTROY 4 -Stack now 0 2 4 5 -Error: popping nterm @3 (: 3) -DESTROY 3 -Stack now 0 2 4 -Error: popping nterm @2 (: 2) -DESTROY 2 -Stack now 0 2 -Error: popping nterm $@1 (: ) -Stack now 0 -Cleanup: discarding lookahead token "end of file" (: ) -Stack now 0 -./actions.at:1657: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input1.c input1.y -stderr: -stdout: +sending: '(' (0@0-9) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) +sending: ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (-1@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. +sending: '(' (0@0-9) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) +sending: ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (-1@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. +./actions.at:1048: $PREPARSER ./input '(x)(x)x' -350. actions.at:1596: ok -stderr: -./actions.at:1907: $PREPARSER ./input -stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +346. actions.at:1174: ./actions.at:1047: $PREPARSER ./input '(x)(x)x' + ok -371. types.at:139: testing yacc.c api.value.type={union foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -stderr: -'b' destructor -'a' destructor -./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -372. types.at:139: testing yacc.c api.value.type={union foo} %header ... +373. types.at:139: testing yacc.c %union { float fval; int ival; }; ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -359. actions.at:1863: ok -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - stderr: stdout: ./actions.at:1955: $PREPARSER ./input -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Werror -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -373. types.at:139: testing yacc.c %union { float fval; int ival; }; ... 
-./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -stdout: input1.y:30.3-4: error: useless %destructor for type <> [-Werror=other] input1.y:30.3-4: error: useless %printer for type <> [-Werror=other] -360. actions.at:1918: ok -308. headers.at:189: ok -./actions.at:1417: sed 's,.*/$,,' stderr 1>&2 -stderr: +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1856: sed -ne '/ival:/p' stderr ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - -364. types.at:139: ok +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=error -./actions.at:478: $PREPARSER ./input stderr: -374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ... stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stdout: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: 'x' (6@60-69) +thing (6@60-69): 'x' (6@60-69) +sending: END (7@70-79) +70.70-79.78: syntax error, unexpected END, expecting 'x' +Freeing nterm thing (6@60-69) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Freeing token END (7@70-79) +Parsing FAILED. +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: 'x' (6@60-69) +thing (6@60-69): 'x' (6@60-69) +sending: END (7@70-79) +70.70-79.78: syntax error, unexpected END, expecting 'x' +Freeing nterm thing (6@60-69) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Freeing token END (7@70-79) +Parsing FAILED. +363. types.at:139: ok +./conflicts.at:388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.c input.y +./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +237. conflicts.at:1096: ok +345. 
actions.at:1071: ok + + -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: ./types.at:139: $PREPARSER ./test -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: H i . J K $end - Shift derivation - $accept - `-> 0: a $end - `-> 2: H i - `-> 4: i . J K - Second example: H i . J $end - Reduce derivation - $accept - `-> 0: s $end - `-> 1: a J - `-> 2: H i . -input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr 375. types.at:139: testing yacc.c %union foo { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -stderr: 376. types.at:139: testing yacc.c %union foo { float fval; int ival; }; %header ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1417: sed 's,.*/$,,' stderr 1>&2 ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./actions.at:1656: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stdout: -stderr: +./actions.at:1050: $PREPARSER ./input '(xxxxx)(x)(x)y' +355. actions.at:1856: 360. actions.at:1918: stdout: + ok stderr: -287. synclines.at:440: ok -332. actions.at:478: stdout: +./actions.at:1051: $PREPARSER ./input '(xxxxx)(x)(x)y' ok -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test +./actions.at:1053: $PREPARSER ./input '(x)' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y 378. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... 
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -363. types.at:139: ok +379. types.at:139: testing yacc.c api.value.type=union ... +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none -stdout: -./actions.at:1051: $PREPARSER ./input '(x)' stderr: -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -365. types.at:139: ok - stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) +sending: 'x' (2@20-29) +thing (2@20-29): 'x' (2@20-29) +sending: 'x' (3@30-39) +30.30-39.38: syntax error, unexpected 'x', expecting ')' +Freeing nterm thing (2@20-29) +Freeing nterm thing (1@10-19) +Freeing token 'x' (3@30-39) +sending: 'x' (4@40-49) +Freeing token 'x' (4@40-49) +sending: 'x' (5@50-59) +Freeing token 'x' (5@50-59) +sending: ')' (6@60-69) +line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) +sending: '(' (7@70-79) +sending: 'x' (8@80-89) +thing (8@80-89): 'x' (8@80-89) +sending: ')' (9@90-99) +line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) +sending: '(' (10@100-109) +sending: 'x' (11@110-119) +thing (11@110-119): 'x' (11@110-119) +sending: ')' (12@120-129) +line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) +sending: 'y' (13@130-139) +input (0@129-129): /* Nothing */ +input (2@100-129): line (10@100-129) input (0@129-129) +input (2@70-129): line (7@70-99) input (2@100-129) +input (2@0-129): line (-1@0-69) input (2@70-129) +130.130-139.138: syntax error, unexpected 'y', expecting END +Freeing nterm input (2@0-129) +Freeing token 'y' (13@130-139) +Parsing FAILED. 
+stdout: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: 'x' (2@20-29) +thing (2@20-29): 'x' (2@20-29) +sending: 'x' (3@30-39) +30.30-39.38: syntax error, unexpected 'x', expecting ')' +Freeing nterm thing (2@20-29) +Freeing nterm thing (1@10-19) +Freeing token 'x' (3@30-39) +sending: 'x' (4@40-49) +Freeing token 'x' (4@40-49) +sending: 'x' (5@50-59) +Freeing token 'x' (5@50-59) +sending: ')' (6@60-69) +line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) +sending: '(' (7@70-79) +sending: 'x' (8@80-89) +thing (8@80-89): 'x' (8@80-89) +sending: ')' (9@90-99) +line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) +sending: '(' (10@100-109) +sending: 'x' (11@110-119) +thing (11@110-119): 'x' (11@110-119) +sending: ')' (12@120-129) +line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) +sending: 'y' (13@130-139) +input (0@129-129): /* Nothing */ +input (2@100-129): line (10@100-129) input (0@129-129) +input (2@70-129): line (7@70-99) input (2@100-129) +input (2@0-129): line (-1@0-69) input (2@70-129) +130.130-139.138: syntax error, unexpected 'y', expecting END +Freeing nterm input (2@0-129) +Freeing token 'y' (13@130-139) +Parsing FAILED. +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) sending: ')' (2@20-29) line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) sending: END (3@30-39) @@ -10327,63 +10351,133 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1047: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' +./actions.at:1048: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' stderr: -366. types.at:139: 367. types.at:139: ok stdout: -306. headers.at:187: ok +287. synclines.at:440: ok +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=error +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: -./actions.at:1051: $PREPARSER ./input '!' 
-./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none - ok -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./actions.at:1050: $PREPARSER ./input '(x)' - +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: '(' (6@60-69) +sending: 'x' (7@70-79) +thing (7@70-79): 'x' (7@70-79) +sending: ')' (8@80-89) +line (6@60-89): '(' (6@60-69) thing (7@70-79) ')' (8@80-89) +sending: '(' (9@90-99) +sending: 'x' (10@100-109) +thing (10@100-109): 'x' (10@100-109) +sending: ')' (11@110-119) +line (9@90-119): '(' (9@90-99) thing (10@100-109) ')' (11@110-119) +sending: '(' (12@120-129) +sending: 'x' (13@130-139) +thing (13@130-139): 'x' (13@130-139) +sending: ')' (14@140-149) +line (12@120-149): '(' (12@120-129) thing (13@130-139) ')' (14@140-149) +sending: '(' (15@150-159) +sending: 'x' (16@160-169) +thing (16@160-169): 'x' (16@160-169) +sending: ')' (17@170-179) +line (15@150-179): '(' (15@150-159) thing (16@160-169) ')' (17@170-179) +sending: '(' (18@180-189) +sending: 'x' (19@190-199) +thing (19@190-199): 'x' (19@190-199) +sending: ')' (20@200-209) +200.200-209.208: memory exhausted +Freeing nterm thing (19@190-199) +Freeing nterm line (15@150-179) +Freeing nterm line (12@120-149) +Freeing nterm line (9@90-119) +Freeing nterm line (6@60-89) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Parsing FAILED (status 2). +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: '(' (6@60-69) +sending: 'x' (7@70-79) +thing (7@70-79): 'x' (7@70-79) +sending: ')' (8@80-89) +line (6@60-89): '(' (6@60-69) thing (7@70-79) ')' (8@80-89) +sending: '(' (9@90-99) +sending: 'x' (10@100-109) +thing (10@100-109): 'x' (10@100-109) +sending: ')' (11@110-119) +line (9@90-119): '(' (9@90-99) thing (10@100-109) ')' (11@110-119) +sending: '(' (12@120-129) +sending: 'x' (13@130-139) +thing (13@130-139): 'x' (13@130-139) +sending: ')' (14@140-149) +line (12@120-149): '(' (12@120-129) thing (13@130-139) ')' (14@140-149) +sending: '(' (15@150-159) +sending: 'x' (16@160-169) +thing (16@160-169): 'x' (16@160-169) +sending: ')' (17@170-179) +line (15@150-179): '(' (15@150-159) thing (16@160-169) ')' (17@170-179) +sending: '(' (18@180-189) +sending: 'x' (19@190-199) +thing (19@190-199): 'x' (19@190-199) +sending: ')' (20@200-209) +200.200-209.208: memory exhausted +Freeing nterm thing (19@190-199) +Freeing nterm line (15@150-179) +Freeing nterm line (12@120-149) +Freeing nterm line (9@90-119) +Freeing nterm line (6@60-89) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Parsing FAILED (status 2). +./types.at:139: $PREPARSER ./test +364. types.at:139: ok +./actions.at:1053: $PREPARSER ./input '!' 
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -stderr: -379. types.at:139: testing yacc.c api.value.type=union ... -stdout: -382. types.at:139: testing glr.c api.value.type={double} %header ... -./actions.at:478: $PREPARSER ./input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -381. types.at:139: testing glr.c api.value.type={double} ... -383. types.at:139: testing glr.c api.value.type={variant} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y 380. types.at:139: testing yacc.c api.value.type=union %header ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: +381. types.at:139: testing glr.c api.value.type={double} ... ./types.at:139: $PREPARSER ./test -stderr: -stderr: -stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: +./actions.at:1054: $PREPARSER ./input '(x)' +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stdout: +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./conflicts.at:388: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./types.at:139: $PREPARSER ./test stderr: -./actions.at:1856: $PREPARSER ./input --debug stderr: -368. types.at:139: ok -stdout: sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -10391,7 +10485,6 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. -stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10403,78 +10496,101 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1051: $PREPARSER ./input '(x)(x)x' +./actions.at:1050: $PREPARSER ./input '(x)(x)x' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '(x)' -369. 
types.at:139: ok -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: -384. types.at:139: testing glr.c api.value.type={variant} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -331. actions.at:478: ./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: - ok stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 53): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +stderr: +stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) sending: ')' (2@20-29) line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -stdout: -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: 'x' (6@60-69) +thing (6@60-69): 'x' (6@60-69) +sending: END (7@70-79) +70.70-79.78: syntax error, unexpected END, expecting 'x' +Freeing nterm thing (6@60-69) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Freeing token END (7@70-79) +Parsing FAILED. 
+sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: 'x' (6@60-69) +thing (6@60-69): 'x' (6@60-69) +sending: END (7@70-79) +70.70-79.78: syntax error, unexpected END, expecting 'x' +Freeing nterm thing (6@60-69) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Freeing token END (7@70-79) +Parsing FAILED. ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -370. types.at:139: ok -385. types.at:139: testing glr.c api.value.type={struct foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./actions.at:1050: $PREPARSER ./input '!' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1054: $PREPARSER ./input '!' +335. actions.at:1047: ok +369. types.at:139: ok +336. actions.at:1048: ok +367. types.at:139: ok -./actions.at:1417: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input1 input1.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + + +stderr: +stdout: +./conflicts.at:754: $PREPARSER ./input +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none +382. types.at:139: testing glr.c api.value.type={double} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: +385. types.at:139: testing glr.c api.value.type={struct foo} ... +stdout: +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -./actions.at:1051: $PREPARSER ./input '!!!' stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +syntax error, unexpected 'a', expecting 'b' or 'c' +stderr: +384. types.at:139: testing glr.c api.value.type={variant} %header ... +366. types.at:139: ok +./actions.at:1053: $PREPARSER ./input '!!!' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -10482,113 +10598,43 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. -387. types.at:139: testing glr.c api.value.type={struct bar} ... -386. types.at:139: testing glr.c api.value.type={struct foo} %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 53): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) +383. types.at:139: testing glr.c api.value.type={variant} ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '!' -371. types.at:139: ok +365. types.at:139: ok + +386. types.at:139: testing glr.c api.value.type={struct foo} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +387. types.at:139: testing glr.c api.value.type={struct bar} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: +stderr: +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: +stdout: stderr: +./actions.at:1056: $PREPARSER ./input '(x)' stdout: +./actions.at:1057: $PREPARSER ./input '(x)' +./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./actions.at:1856: sed -ne '/ival:/p' stderr -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./actions.at:372: $PREPARSER ./input stderr: -372. types.at:139: stderr: stderr: -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1 -1.1: syntax error -355. actions.at:1856: ./actions.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -373. types.at:139: ok - ok - ok -./actions.at:1051: $PREPARSER ./input '(y)' -388. types.at:139: testing glr.c api.value.type={struct bar} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -sending: '!' 
(0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1050: $PREPARSER ./input '!!!' stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: -sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) -sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. -./actions.at:1054: $PREPARSER ./input '!!!' -324. actions.at:372: - -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok stderr: -stdout: stderr: -stdout: stderr: -./types.at:139: $PREPARSER ./test - sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -10598,350 +10644,184 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. -./actions.at:1051: $PREPARSER ./input '(xxxxx)(x)(x)y' -./types.at:139: $PREPARSER ./test -./actions.at:1050: $PREPARSER ./input '(y)' -389. types.at:139: testing glr.c api.value.type={union foo} ... -390. types.at:139: testing glr.c api.value.type={union foo} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -391. types.at:139: testing glr.c %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -392. types.at:139: testing glr.c %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) sending: END (3@30-39) input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) +input (2@0-29): line (0@0-29) input (0@29-29) Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. 
-./actions.at:1054: $PREPARSER ./input '(y)' -stderr: -stderr: -stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) -sending: 'x' (2@20-29) -thing (2@20-29): 'x' (2@20-29) -sending: 'x' (3@30-39) -30.30-39.38: syntax error, unexpected 'x', expecting ')' -Freeing nterm thing (2@20-29) -Freeing nterm thing (1@10-19) -Freeing token 'x' (3@30-39) -sending: 'x' (4@40-49) -Freeing token 'x' (4@40-49) -sending: 'x' (5@50-59) -Freeing token 'x' (5@50-59) -sending: ')' (6@60-69) -line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) -sending: '(' (7@70-79) -sending: 'x' (8@80-89) -thing (8@80-89): 'x' (8@80-89) -sending: ')' (9@90-99) -line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) -sending: '(' (10@100-109) -sending: 'x' (11@110-119) -thing (11@110-119): 'x' (11@110-119) -sending: ')' (12@120-129) -line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) -sending: 'y' (13@130-139) -input (0@129-129): /* Nothing */ -input (2@100-129): line (10@100-129) input (0@129-129) -input (2@70-129): line (7@70-99) input (2@100-129) -input (2@0-129): line (-1@0-69) input (2@70-129) -130.130-139.138: syntax error, unexpected 'y', expecting END -Freeing nterm input (2@0-129) -Freeing token 'y' (13@130-139) -Parsing FAILED. -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: $PREPARSER ./input -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -./actions.at:1050: $PREPARSER ./input '(xxxxx)(x)(x)y' -sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) sending: END (3@30-39) input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) +input (2@0-29): line (0@0-29) input (0@29-29) Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -stderr: -./actions.at:1051: $PREPARSER ./input '(x)(x)x' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +370. types.at:139: 337. actions.at:1050: ok + ok +338. actions.at:1051: ok + + + +388. types.at:139: testing glr.c api.value.type={struct bar} %header ... +389. types.at:139: testing glr.c api.value.type={union foo} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +390. types.at:139: testing glr.c api.value.type={union foo} %header ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1417: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input1 input1.c $LIBS +368. types.at:139: 373. types.at:139: ok + ok +./actions.at:1057: $PREPARSER ./input '!' +371. types.at:139: ok +./actions.at:1054: $PREPARSER ./input '!!!' 374. types.at:139: ok -376. types.at:139: ok -syntax error -stderr: -stdout: +228. conflicts.at:676: ok + + + -./actions.at:373: $PREPARSER ./input -stderr: -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: stderr: -./types.at:139: $PREPARSER ./test -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: 'x' (6@60-69) -thing (6@60-69): 'x' (6@60-69) -sending: END (7@70-79) -70.70-79.78: syntax error, unexpected END, expecting 'x' -Freeing nterm thing (6@60-69) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Freeing token END (7@70-79) -Parsing FAILED. -393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ... stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: 'x' (2@20-29) -thing (2@20-29): 'x' (2@20-29) -sending: 'x' (3@30-39) -30.30-39.38: syntax error, unexpected 'x', expecting ')' -Freeing nterm thing (2@20-29) -Freeing nterm thing (1@10-19) -Freeing token 'x' (3@30-39) -sending: 'x' (4@40-49) -Freeing token 'x' (4@40-49) -sending: 'x' (5@50-59) -Freeing token 'x' (5@50-59) -sending: ')' (6@60-69) -line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) -sending: '(' (7@70-79) -sending: 'x' (8@80-89) -thing (8@80-89): 'x' (8@80-89) -sending: ')' (9@90-99) -line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) -sending: '(' (10@100-109) -sending: 'x' (11@110-119) -thing (11@110-119): 'x' (11@110-119) -sending: ')' (12@120-129) -line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) -sending: 'y' (13@130-139) -input (0@129-129): /* Nothing */ -input (2@100-129): line (10@100-129) input (0@129-129) -input (2@70-129): line (7@70-99) input (2@100-129) -input (2@0-129): line (-1@0-69) input (2@70-129) -130.130-139.138: syntax error, unexpected 'y', expecting END -Freeing nterm input (2@0-129) -Freeing token 'y' (13@130-139) -Parsing FAILED. -1.1 -1.1: syntax error stderr: -./actions.at:373: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +391. types.at:139: testing glr.c %union { float fval; int ival; }; ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./actions.at:1054: $PREPARSER ./input '(xxxxx)(x)(x)y' stdout: -./headers.at:323: echo "x4" >>expout +393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +./actions.at:1657: $PREPARSER ./input --debug +392. types.at:139: testing glr.c %union { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y 394. types.at:139: testing glr.c %union foo { float fval; int ival; }; %header ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $PREPARSER ./test +395. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x5.cc x5.y stderr: -377. types.at:139: ok -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -237. conflicts.at:1096: ok stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: 'x' (2@20-29) -thing (2@20-29): 'x' (2@20-29) -sending: 'x' (3@30-39) -30.30-39.38: syntax error, unexpected 'x', expecting ')' -Freeing nterm thing (2@20-29) -Freeing nterm thing (1@10-19) -Freeing token 'x' (3@30-39) -sending: 'x' (4@40-49) -Freeing token 'x' (4@40-49) -sending: 'x' (5@50-59) -Freeing token 'x' (5@50-59) -sending: ')' (6@60-69) -line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) -sending: '(' (7@70-79) -sending: 'x' (8@80-89) -thing (8@80-89): 'x' (8@80-89) -sending: ')' (9@90-99) -line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) -sending: '(' (10@100-109) -sending: 'x' (11@110-119) -thing (11@110-119): 'x' (11@110-119) -sending: ')' (12@120-129) -line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) -sending: 'y' (13@130-139) -input (0@129-129): /* Nothing */ -input (2@100-129): line (10@100-129) input (0@129-129) -input (2@70-129): line (7@70-99) input (2@100-129) -input (2@0-129): line (-1@0-69) input (2@70-129) -130.130-139.138: syntax error, unexpected 'y', expecting END -Freeing nterm input (2@0-129) -Freeing token 'y' (13@130-139) -Parsing FAILED. -325. 
actions.at:373: ok -./actions.at:1050: $PREPARSER ./input '(x)(x)x' +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 30): +-> $$ = nterm $@1 (: ) +Entering state 2 +Stack now 0 2 +Reducing stack by rule 2 (line 31): +-> $$ = nterm @2 (: 2) +Entering state 4 +Stack now 0 2 4 +Reducing stack by rule 3 (line 32): +-> $$ = nterm @3 (: 3) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 4 (line 33): +-> $$ = nterm @4 (: 4) +Entering state 6 +Stack now 0 2 4 5 6 +Reading a token +Now at end of input. +syntax error +Error: popping nterm @4 (: 4) +DESTROY 4 +Stack now 0 2 4 5 +Error: popping nterm @3 (: 3) +DESTROY 3 +Stack now 0 2 4 +Error: popping nterm @2 (: 2) +DESTROY 2 +Stack now 0 2 +Error: popping nterm $@1 (: ) +Stack now 0 +Cleanup: discarding lookahead token "end of file" (: ) +Stack now 0 +stderr: +sending: '!' (0@0-9) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) +sending: END (3@30-39) +Freeing token END (3@30-39) +Freeing nterm input (5@0-29) +Successful parse. +./actions.at:1053: $PREPARSER ./input '(y)' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. +./actions.at:1056: $PREPARSER ./input '!' +stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - - +./actions.at:1856: $PREPARSER ./input --debug +stderr: stderr: -395. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; ... +stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -397. types.at:139: testing glr.c api.value.type=union ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -396. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $PREPARSER ./test +./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: 'x' (6@60-69) -thing (6@60-69): 'x' (6@60-69) -sending: END (7@70-79) -70.70-79.78: syntax error, unexpected END, expecting 'x' -Freeing nterm thing (6@60-69) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Freeing token END (7@70-79) -Parsing FAILED. ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -375. 
types.at:139: ok -./actions.at:1054: $PREPARSER ./input '(x)(x)x' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -338. actions.at:1051: 337. actions.at:1050: 378. types.at:139: ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - ok - ok - ok +./actions.at:1657: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -./headers.at:324: $CXX $CPPFLAGS $CXXFLAGS -c -o x5.o x5.cc -stderr: -stderr: -stdout: ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stdout: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: 'x' (6@60-69) -thing (6@60-69): 'x' (6@60-69) -sending: END (7@70-79) -70.70-79.78: syntax error, unexpected END, expecting 'x' -Freeing nterm thing (6@60-69) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Freeing token END (7@70-79) -Parsing FAILED. -./types.at:139: $PREPARSER ./test -379. types.at:139: ok - - - - stderr: -stdout: -./actions.at:1057: $PREPARSER ./input '(x)' - stderr: -stdout: -./actions.at:1856: $PREPARSER ./input --debug -398. types.at:139: testing glr.c api.value.type=union %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -401. types.at:139: testing lalr1.cc api.value.type={variant} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -399. types.at:139: testing lalr1.cc api.value.type={double} ... -402. types.at:139: testing lalr1.cc api.value.type={variant} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -400. types.at:139: testing lalr1.cc api.value.type={double} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. 
sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) sending: END (3@30-39) input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) +input (2@0-29): line (-1@0-29) input (0@29-29) Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +375. types.at:139: ok stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse @@ -10972,16 +10852,44 @@ Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: H i . J K $end + Shift derivation + $accept + `-> 0: a $end + `-> 2: H i + `-> 4: i . J K + Second example: H i . J $end + Reduce derivation + $accept + `-> 0: s $end + `-> 1: a J + `-> 2: H i . +input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +stderr: +stdout: +stderr: +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test +396. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -380. types.at:139: ok +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse +stderr: +stderr: +377. types.at:139: Starting parse Entering state 0 Stack now 0 Reading a token @@ -11008,28 +10916,129 @@ Stack now 0 2 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) -340. actions.at:1054: ok + ok +./actions.at:1057: $PREPARSER ./input '!!!' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +372. 
types.at:139: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./actions.at:1054: $PREPARSER ./input '(y)' +350. actions.at:1596: ok + + + +stderr: +stderr: +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./conflicts.at:388: $PREPARSER ./input '0<0' +398. types.at:139: testing glr.c api.value.type=union %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +399. types.at:139: testing lalr1.cc api.value.type={double} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +397. types.at:139: testing glr.c api.value.type=union ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +stderr: ./actions.at:1856: sed -ne '/ival:/p' stderr -./actions.at:1057: $PREPARSER ./input '!' stderr: -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +376. types.at:139: ok +./actions.at:1056: $PREPARSER ./input '!!!' +sending: '(' (0@0-9) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) +sending: ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (-1@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) Successful parse. +./actions.at:1053: $PREPARSER ./input '(xxxxx)(x)(x)y' +378. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +380. types.at:139: ok + stderr: stdout: -356. actions.at:1856: ./actions.at:1856: $PREPARSER ./input --debug - ok -./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./actions.at:1856: $PREPARSER ./input --debug + +400. types.at:139: testing lalr1.cc api.value.type={double} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +401. types.at:139: testing lalr1.cc api.value.type={variant} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +402. types.at:139: testing lalr1.cc api.value.type={variant} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./actions.at:1057: $PREPARSER ./input '!!!' 
- -./actions.at:1417: $PREPARSER ./input1 --debug +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +./conflicts.at:388: $PREPARSER ./input '0<0<0' +stderr: +stderr: +sending: '!' (0@0-9) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) +sending: END (3@30-39) +Freeing token END (3@30-39) +Freeing nterm input (5@0-29) +Successful parse. stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: 'x' (2@20-29) +thing (2@20-29): 'x' (2@20-29) +sending: 'x' (3@30-39) +30.30-39.38: syntax error, unexpected 'x', expecting ')' +Freeing nterm thing (2@20-29) +Freeing nterm thing (1@10-19) +Freeing token 'x' (3@30-39) +sending: 'x' (4@40-49) +Freeing token 'x' (4@40-49) +sending: 'x' (5@50-59) +Freeing token 'x' (5@50-59) +sending: ')' (6@60-69) +line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) +sending: '(' (7@70-79) +sending: 'x' (8@80-89) +thing (8@80-89): 'x' (8@80-89) +sending: ')' (9@90-99) +line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) +sending: '(' (10@100-109) +sending: 'x' (11@110-119) +thing (11@110-119): 'x' (11@110-119) +sending: ')' (12@120-129) +line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) +sending: 'y' (13@130-139) +input (0@129-129): /* Nothing */ +input (2@100-129): line (10@100-129) input (0@129-129) +input (2@70-129): line (7@70-99) input (2@100-129) +input (2@0-129): line (-1@0-69) input (2@70-129) +130.130-139.138: syntax error, unexpected 'y', expecting END +Freeing nterm input (2@0-129) +Freeing token 'y' (13@130-139) +Parsing FAILED. Starting parse Entering state 0 Reading a token @@ -11051,9 +11060,30 @@ Entering state 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) - +sending: '!' (0@0-9) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) +sending: END (3@30-39) +Freeing token END (3@30-39) +Freeing nterm input (5@0-29) +Successful parse. +379. types.at:139: ok ./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +356. actions.at:1856: ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file + ok + + +403. types.at:139: testing lalr1.cc api.value.type={struct foo} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +404. types.at:139: testing lalr1.cc api.value.type={struct foo} %header ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: Starting parse @@ -11077,100 +11107,15 @@ Entering state 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 49): --> $$ = nterm start (1.1: <*> for 'S' @ 1) -Entering state 1 -Stack now 0 1 -Reading a token -Now at end of input. -Shifting token END (1.1: <*> for 'E' @ 1) -Entering state 2 -Stack now 0 1 2 -Stack now 0 1 2 -Cleanup: popping token END (1.1: <*> for 'E' @ 1) -Cleanup: popping nterm start (1.1: <*> for 'S' @ 1) -404. types.at:139: testing lalr1.cc api.value.type={struct foo} %header ... +syntax error, unexpected '<', expecting end of file +./actions.at:1054: $PREPARSER ./input '(xxxxx)(x)(x)y' +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./actions.at:1856: sed -ne '/ival:/p' stderr -./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./actions.at:1417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -403. types.at:139: testing lalr1.cc api.value.type={struct foo} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -405. types.at:139: testing lalr1.cc api.value.type={struct bar} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -357. actions.at:1856: ok -stdout: -stderr: -./types.at:139: $PREPARSER ./test -./actions.at:1057: $PREPARSER ./input '(y)' -stderr: -stdout: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stdout: -stdout: -347. actions.at:1307: ok -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test - - -406. types.at:139: testing lalr1.cc api.value.type={struct bar} %header ... 
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: -sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) -sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -stderr: -stderr: -stderr: -407. types.at:139: testing lalr1.cc api.value.type={union foo} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1057: $PREPARSER ./input '(xxxxx)(x)(x)y' -383. types.at:139: ok +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./actions.at:1056: $PREPARSER ./input '(y)' sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11206,47 +11151,23 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -382. types.at:139: ok -381. types.at:139: ok -stderr: -386. types.at:139: ok -stdout: - -307. headers.at:188: ok - - -408. types.at:139: testing lalr1.cc api.value.type={union foo} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -410. types.at:139: testing lalr1.cc %union { float fval; int ival; }; %header ... -411. types.at:139: testing lalr1.cc api.value.type=union ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -412. types.at:139: testing lalr1.cc api.value.type=union %header ... 
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./actions.at:1057: $PREPARSER ./input '(x)(x)x' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +./actions.at:1057: $PREPARSER ./input '(y)' +./actions.at:1053: $PREPARSER ./input '(x)(x)x' +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stdout: stderr: -./actions.at:1056: $PREPARSER ./input '(x)' +sending: '(' (0@0-9) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) +sending: ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (-1@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11266,223 +11187,88 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. -./actions.at:1053: $PREPARSER ./input '(x)' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -397. types.at:139: ok +./conflicts.at:388: $PREPARSER ./input '0>0' sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) sending: END (3@30-39) input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) +input (2@0-29): line (-1@0-29) input (0@29-29) Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -stdout: -./types.at:139: $PREPARSER ./test -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -342. actions.at:1057: ok -stderr: -./actions.at:1056: $PREPARSER ./input '!' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: $PREPARSER ./input '!' -stderr: -stderr: - -stdout: -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. -390. types.at:139: ok -309. headers.at:191: ok -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -stderr: -sending: '!' 
(0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. -stdout: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +357. actions.at:1856: ok -413. types.at:139: testing lalr1.cc api.value.type=variant ... -./actions.at:478: $PREPARSER ./input -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +405. types.at:139: testing lalr1.cc api.value.type={struct bar} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -stderr: -stderr: -stderr: -stdout: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -stdout: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1054: $PREPARSER ./input '(x)(x)x' stderr: stdout: -./actions.at:374: $PREPARSER ./input -stderr: -stderr: -stderr: -1.1 -1.1: syntax error -414. types.at:139: testing lalr1.cc api.value.type=variant %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./actions.at:1056: $PREPARSER ./input '!!!' -./actions.at:374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: $PREPARSER ./input '!!!' -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -415. types.at:139: testing lalr1.cc api.value.type=variant ... -416. types.at:139: testing lalr1.cc api.value.type=variant %header ... -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -388. types.at:139: 392. types.at:139: 333. actions.at:478: ok - ok - ok -stderr: -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. 
-./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1417: $PREPARSER ./input1 --debug stderr: -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. -385. types.at:139: stderr: -stdout: -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -./actions.at:1053: $PREPARSER ./input '(y)' -./types.at:139: $PREPARSER ./test stdout: -391. types.at:139: ok -./types.at:139: $PREPARSER ./test -stderr: +./headers.at:323: echo "x4" >>expout +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -326. actions.at:374: - ok -stderr: stderr: -sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) -sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -./actions.at:1056: $PREPARSER ./input '(y)' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - +./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x5.cc x5.y stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 49): +-> $$ = nterm start (1.1: <*> for 'S' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Now at end of input. +Shifting token END (1.1: <*> for 'E' @ 1) +Entering state 2 +Stack now 0 1 2 +Stack now 0 1 2 +Cleanup: popping token END (1.1: <*> for 'E' @ 1) +Cleanup: popping nterm start (1.1: <*> for 'S' @ 1) sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. - - +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: 'x' (6@60-69) +thing (6@60-69): 'x' (6@60-69) +sending: END (7@70-79) +70.70-79.78: syntax error, unexpected END, expecting 'x' +Freeing nterm thing (6@60-69) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Freeing token END (7@70-79) +Parsing FAILED. +./actions.at:1056: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1057: $PREPARSER ./input '(xxxxx)(x)(x)y' +339. actions.at:1053: ok -stderr: -stdout: -417. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor ... +406. 
types.at:139: testing lalr1.cc api.value.type={struct bar} %header ... +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -419. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant ... -./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -420. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant %header ... -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -421. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... -422. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ... -stderr: -======== Testing with C++ standard flags: '' -387. types.at:139: 384. types.at:139: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - ok - ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: $PREPARSER ./input '(xxxxx)(x)(x)y' -./actions.at:1056: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: +./conflicts.at:388: $PREPARSER ./input '0>0>0' sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11518,11 +11304,12 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - - +stderr: +syntax error, unexpected '>', expecting end of file sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11558,15 +11345,60 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -389. types.at:139: ok -./actions.at:1053: $PREPARSER ./input '(x)(x)x' +340. actions.at:1054: 347. actions.at:1307: ok + ok -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -423. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... + +stderr: +stdout: +stderr: +309. 
headers.at:191: stdout: + ok +./types.at:139: $PREPARSER ./test stderr: +stdout: +407. types.at:139: testing lalr1.cc api.value.type={union foo} ... +./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +408. types.at:139: testing lalr1.cc api.value.type={union foo} %header ... ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: +./headers.at:324: $CXX $CPPFLAGS $CXXFLAGS -c -o x5.o x5.cc +stdout: +stderr: +stderr: + +./actions.at:374: $PREPARSER ./input ./actions.at:1056: $PREPARSER ./input '(x)(x)x' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ... +stdout: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./actions.at:478: $PREPARSER ./input +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./types.at:139: $PREPARSER ./test +stderr: +stderr: +./actions.at:1057: $PREPARSER ./input '(x)(x)x' +stderr: +387. types.at:139: 381. types.at:139: ok + ok +1.1 +1.1: syntax error sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11586,23 +11418,24 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. -425. types.at:139: testing glr.cc api.value.type={double} ... -424. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... +./actions.at:374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:388: $PREPARSER ./input '0<0>0' +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file + + +410. types.at:139: testing lalr1.cc %union { float fval; int ival; }; %header ... +411. types.at:139: testing lalr1.cc api.value.type=union ... 
======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: +syntax error, unexpected '>', expecting end of file stderr: -stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: $PREPARSER ./test sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11622,39 +11455,68 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. +333. actions.at:478: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +412. types.at:139: testing lalr1.cc api.value.type=union %header ... +./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -339. actions.at:1053: ok -stderr: -./types.at:139: ./check -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +385. types.at:139: ok stderr: stdout: -./types.at:139: ./check -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -341. actions.at:1056: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +326. actions.at:374: 341. actions.at:1056: ok ok -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -396. types.at:139: ok stderr: +./types.at:139: $PREPARSER ./test stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test -427. types.at:139: testing glr.cc api.value.type={variant} ... -426. types.at:139: testing glr.cc api.value.type={double} %header ... + +415. types.at:139: testing lalr1.cc api.value.type=variant ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +413. types.at:139: testing lalr1.cc api.value.type=variant ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +414. types.at:139: testing lalr1.cc api.value.type=variant %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: +307. headers.at:188: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +342. actions.at:1057: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +209. conflicts.at:301: ok + + + +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +416. types.at:139: testing lalr1.cc api.value.type=variant %header ... +417. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor ... +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -428. types.at:139: testing glr.cc api.value.type={variant} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./types.at:139: $PREPARSER ./test +418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: @@ -11663,89 +11525,202 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: stderr: -stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: ./check +stderr: +stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +395. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +392. types.at:139: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -393. 
types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + stderr: stdout: ./types.at:139: $PREPARSER ./test - -398. types.at:139: ok - -429. types.at:139: testing glr.cc api.value.type={struct foo} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +stderr: ./types.at:139: $PREPARSER ./test -430. types.at:139: testing glr.cc api.value.type={struct foo} %header ... +stderr: +stderr: +stdout: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +419. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant ... ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test +420. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant %header ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: +stderr: +stderr: stderr: +390. types.at:139: ok +383. types.at:139: stderr: + ok stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -395. types.at:139: ok +384. types.at:139: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +382. types.at:139: ok +386. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +396. types.at:139: ok +391. types.at:139: ok + + + + +397. types.at:139: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file + + + + +421. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... +stderr: +======== Testing with C++ standard flags: '' +stdout: +./types.at:139: $PREPARSER ./test +422. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... +423. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... +424. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... 
+./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +426. types.at:139: testing glr.cc api.value.type={double} %header ... +425. types.at:139: testing glr.cc api.value.type={double} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +427. types.at:139: testing glr.cc api.value.type={variant} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +428. types.at:139: testing glr.cc api.value.type={variant} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] -time limit exceeded: 6.000000 - First example H i . J K $end - Shift derivation $accept -> [ a -> [ H i -> [ i . J K ] ] $end ] - Second example H i . J $end - Reduce derivation $accept -> [ s -> [ a -> [ H i . ] J ] $end ] -input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] +393. types.at:139: 389. types.at:139: ok + ok +388. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr + + +429. types.at:139: testing glr.cc api.value.type={struct foo} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 431. types.at:139: testing glr.cc api.value.type={struct bar} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +430. types.at:139: testing glr.cc api.value.type={struct foo} %header ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 394. types.at:139: ok + stderr: -267. counterexample.at:441: ok stdout: -./headers.at:324: echo "x5" >>expout -./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x6.c x6.y - - +./types.at:139: $PREPARSER ./test 432. types.at:139: testing glr.cc api.value.type={struct bar} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./headers.at:325: $CC $CFLAGS $CPPFLAGS -c -o x6.o x6.c +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +./types.at:139: ./check +stderr: +stdout: +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check +./types.at:139: ./check +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +398. types.at:139: ok + +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 433. types.at:139: testing glr.cc api.value.type={union foo} ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stderr: stdout: ./actions.at:1060: $PREPARSER ./input '(x)' +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -11758,14 +11733,24 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./types.at:139: $PREPARSER ./test ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./actions.at:1060: $PREPARSER ./input '!' +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stderr: sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -11773,10 +11758,37 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. 
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: $PREPARSER ./test ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./actions.at:1060: $PREPARSER ./input '!!!' +stderr: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -11786,12 +11798,97 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./actions.at:1856: $PREPARSER ./input --debug +stderr: +stdout: +./headers.at:324: echo "x5" >>expout ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. 
+Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x6.c x6.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: +stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./actions.at:1060: $PREPARSER ./input '(y)' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file stderr: stdout: ./types.at:139: $PREPARSER ./test +./actions.at:1856: sed -ne '/ival:/p' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -11804,20 +11901,60 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. 
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:325: $CC $CFLAGS $CPPFLAGS -c -o x6.o x6.c +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +358. actions.at:1856: ok stderr: +stdout: +./types.at:139: $PREPARSER ./test + stderr: stdout: +434. types.at:139: testing glr.cc api.value.type={union foo} %header ... +./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1060: $PREPARSER ./input '(xxxxx)(x)(x)y' -./actions.at:1059: $PREPARSER ./input '(x)' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: +./actions.at:1059: $PREPARSER ./input '(x)' +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file stderr: +stdout: ./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: +stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11853,29 +11990,57 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. 
+./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./headers.at:325: echo "x6" >>expout +stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x7.c x7.y stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./actions.at:1059: $PREPARSER ./input '!' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. stdout: -./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. ./actions.at:1060: $PREPARSER ./input '(x)(x)x' stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:326: $CC $CFLAGS $CPPFLAGS -c -o x7.o x7.c stderr: -./actions.at:1059: $PREPARSER ./input '!' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -11896,30 +12061,27 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. 
-./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: ======== Testing with C++ standard flags: '' +./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. -./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1059: $PREPARSER ./input '!!!' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test -stderr: -344. actions.at:1060: ok -./actions.at:1059: $PREPARSER ./input '!!!' -stdout: -stderr: -./types.at:139: $PREPARSER ./test stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -11929,22 +12091,77 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. +344. actions.at:1060: ok + stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +435. types.at:139: testing glr.cc %union { float fval; int ival; }; ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stdout: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' - +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./actions.at:1059: $PREPARSER ./input '(y)' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: +stderr: ./types.at:139: $PREPARSER ./test -./actions.at:1059: $PREPARSER ./input '(y)' +stdout: +./types.at:139: $PREPARSER ./test +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: -./types.at:139: $CXX 
$CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -11957,16 +12174,73 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./headers.at:326: echo "x7" >>expout +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -434. types.at:139: testing glr.cc api.value.type={union foo} %header ... -./actions.at:1059: $PREPARSER ./input '(xxxxx)(x)(x)y' +stderr: +stderr: ======== Testing with C++ standard flags: '' +stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' +./headers.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x8.c x8.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +stderr: +./actions.at:1059: $PREPARSER ./input '(xxxxx)(x)(x)y' +stderr: +stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +stderr: +stderr: +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: 
+stderr: +stderr: +stderr: stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -12003,79 +12277,65 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: $PREPARSER ./test stderr: stdout: -./headers.at:325: echo "x6" >>expout -./actions.at:1856: $PREPARSER ./input --debug -./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./headers.at:327: $CC $CFLAGS $CPPFLAGS -c -o x8.o x8.c stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +stderr: stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ./types.at:139: $PREPARSER ./test -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. 
-Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x7.c x7.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed -ne '/ival:/p' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1059: $PREPARSER ./input '(x)(x)x' stderr: stdout: ./types.at:139: $PREPARSER ./test -./actions.at:1059: $PREPARSER ./input '(x)(x)x' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -358. actions.at:1856: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stderr: +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -12095,129 +12355,123 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -343. 
actions.at:1059: ok -./headers.at:326: $CC $CFLAGS $CPPFLAGS -c -o x7.o x7.c - -435. types.at:139: testing glr.cc %union { float fval; int ival; }; ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -436. types.at:139: testing glr.cc %union { float fval; int ival; }; %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' stderr: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +-std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +343. actions.at:1059: ok stderr: stdout: -./types.at:139: $PREPARSER ./test stderr: +./types.at:139: ./check stdout: +./types.at:139: ./check + stderr: +stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +436. types.at:139: testing glr.cc %union { float fval; int ival; }; %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test +-std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: ./check +stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +-std=c++98 not supported +======== Testing with C++ standard flags: '' stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: -./headers.at:326: echo "x7" >>expout -./headers.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x8.c x8.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./headers.at:327: $CC $CFLAGS $CPPFLAGS -c -o x8.o x8.c +./types.at:139: $PREPARSER ./test stderr: stdout: +./headers.at:327: echo "x8" >>expout +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./headers.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x9.cc x9.y +-std=c++98 not supported +======== Testing with C++ standard flags: '' stderr: -./types.at:139: $PREPARSER ./test 
-stdout: stderr: +stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check ======== Testing with C++ standard flags: '' -./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o x9.o x9.cc +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file stderr: stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./types.at:139: ./check stderr: stdout: ./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +-std=c++03 not supported +======== Testing with C++ standard flags: '' +-std=c++03 not supported +======== Testing with C++ standard flags: '' stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12225,524 +12479,319 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing 
with C++ standard flags: '' -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: +./types.at:139: $PREPARSER ./test stderr: -./types.at:139: ./check stdout: +./types.at:139: $PREPARSER ./test +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $PREPARSER ./test stderr: -./headers.at:327: echo "x8" >>expout -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x9.cc x9.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./headers.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o x9.o x9.cc -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: +stderr: ./types.at:139: ./check stderr: --std=c++98 not supported -======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: +./types.at:139: ./check stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +-std=c++03 not supported ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: ./check ======== Testing with C++ standard flags: '' --std=c++11 not supported +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o 
check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $PREPARSER ./test stderr: stdout: -./types.at:139: ./check ./types.at:139: $PREPARSER ./test --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: ./check +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +-std=c++03 not supported +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: ./check --std=c++11 not supported ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check stderr: stdout: ./types.at:139: $PREPARSER ./test +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +-std=c++11 not supported stdout: +======== Testing with C++ standard flags: '' +-std=c++11 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check ./types.at:139: ./check +-std=c++11 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: --std=c++98 not supported +-std=c++11 not supported ======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: stderr: -stdout: ./types.at:139: $PREPARSER ./test stdout: -./types.at:139: $PREPARSER ./test -stderr: +./types.at:139: ./check stderr: stdout: ./types.at:139: $PREPARSER ./test -stdout: -./types.at:139: $PREPARSER ./test -stderr: stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +-std=c++11 not supported +======== Testing with C++ standard flags: '' +stderr: stderr: stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: stderr: +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -stdout: -./types.at:139: ./check -./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example: . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: . c A A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 4: %empty . 
`-> 6: c A A -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: b . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: b . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 6: c A - `-> 4: %empty . -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: c . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: c . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 7.000000 - First example: b c . A - Shift derivation - a - `-> 1: b d - `-> 6: c . A - Second example: b c . c A A $end - Reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example: b c . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: b c . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - First example: b c . A - Shift derivation - a - `-> 1: b d - `-> 6: c . A - Second example: b c . A $end - Reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: b d . - First reduce derivation - a - `-> 1: b d . - Second reduce derivation - a - `-> 1: b d - `-> 7: d . -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: c d . - First reduce derivation - a - `-> 2: c d . - Second reduce derivation - a - `-> 2: c d - `-> 7: d . 
-input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -======== Testing with C++ standard flags: '' -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: ./check stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: -stderr: ./types.at:139: ./check -stdout: -std=c++11 not 
supported ======== Testing with C++ standard flags: '' -./types.at:139: ./check --std=c++98 not supported +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: ./check +======== Testing with C++ standard flags: '' stdout: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: --std=c++03 not supported -======== Testing with C++ standard flags: '' stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: ./check stderr: stdout: ./types.at:139: ./check -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -======== Testing with C++ standard flags: '' 
./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS --std=c++11 not supported -======== Testing with C++ standard flags: '' --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: -stdout: -./types.at:139: ./check -stdout: --std=c++11 not supported -======== Testing with C++ standard flags: '' ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: ./check stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: ./check -stderr: --std=c++11 not supported -======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: 
COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12751,242 +12800,246 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./types.at:139: $PREPARSER ./test 
+./headers.at:328: echo "x9" >>expout +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xa.cc xa.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: +stdout: +./types.at:139: $PREPARSER ./test +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./headers.at:329: $CXX $CPPFLAGS $CXXFLAGS -c -o xa.o xa.cc +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: $PREPARSER ./test stdout: -======== Testing with C++ standard flags: '' -./headers.at:328: echo "x9" >>expout -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xa.cc xa.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: stderr: +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: 
-./headers.at:329: $CXX $CPPFLAGS $CXXFLAGS -c -o xa.o xa.cc ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc 
$LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stdout: stderr: +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: stdout: +./types.at:139: $PREPARSER ./test stderr: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test stderr: stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: stdout: ./types.at:139: $PREPARSER ./test +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: +stderr: stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test -stdout: stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' stderr: 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12996,80 +13049,126 @@ stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stderr: stderr: stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison 
--color=no -fno-caret -Werror $file +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./types.at:139: $PREPARSER ./test +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] +time limit exceeded: 7.000000 + First example H i . J K $end + Shift derivation $accept -> [ a -> [ H i -> [ i . J K ] ] $end ] + Second example H i . J $end + Reduce derivation $accept -> [ s -> [ a -> [ H i . ] J ] $end ] +input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr stdout: ./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -======== Testing with C++ standard flags: '' +stdout: +./headers.at:329: echo "xa" >>expout +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xb.cc xb.y stderr: +267. counterexample.at:441: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: + +437. types.at:139: testing glr.cc api.value.type=union ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: ======== Testing with C++ standard flags: '' -stdout: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +258. conflicts.at:2331: stdout: + ok ./types.at:139: $PREPARSER ./test + +438. types.at:139: testing glr.cc api.value.type=union %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./headers.at:330: $CXX $CPPFLAGS $CXXFLAGS -c -o xb.o xb.cc +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./headers.at:329: echo "xa" >>expout -stderr: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xb.cc xb.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: stdout: -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
stderr: +./types.at:139: ./check stdout: -stderr: +stdout: +./types.at:139: ./check ./types.at:139: ./check +stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: @@ -13077,211 +13176,185 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test -./headers.at:330: $CXX $CPPFLAGS $CXXFLAGS -c -o xb.o xb.cc -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: -stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: ./types.at:139: $PREPARSER ./test +stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./types.at:139: $PREPARSER ./test stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check stderr: -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc 
test.y -stdout: -./types.at:139: $PREPARSER ./test stderr: ======== Testing with C++ standard flags: '' -./types.at:139: $PREPARSER ./test ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -stdout: -./types.at:139: ./check -./types.at:139: $PREPARSER ./test -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: 
stderr: +./types.at:139: $PREPARSER ./test stdout: +./types.at:139: $PREPARSER ./test stderr: -./types.at:139: ./check stdout: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: -======== Testing with C++ standard flags: '' -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: -./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test stderr: +stdout: +./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' +stderr: +stderr: +stderr: +stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 
-./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +stderr: stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +./headers.at:330: echo "xb" >>expout stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test +./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xc.cc xc.y stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -13289,802 +13362,815 @@ ./types.at:139: $PREPARSER ./test stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stderr: ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stderr: ./types.at:139: $PREPARSER ./test stderr: stdout: -stdout: ./types.at:139: $PREPARSER ./test -./headers.at:330: echo "xb" >>expout stderr: stdout: ./types.at:139: $PREPARSER ./test -stdout: -stdout: stderr: -./types.at:139: $PREPARSER ./test stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test stderr: -stdout: ./types.at:139: 
$PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: -./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xc.cc xc.y -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: stderr: stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' +./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' 
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -stdout: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' -./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: stdout: -stderr: -./types.at:139: $PREPARSER ./test -stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -stderr: -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: ./check -stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -stdout: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== 
Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: stdout: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: stdout: ./types.at:139: $PREPARSER ./test +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: stdout: +stderr: ./types.at:139: $PREPARSER ./test +stdout: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' stderr: stderr: stderr: +======== Testing with C++ standard flags: '' +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: -stderr: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; 
export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: ./check -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: ./check +stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check +stderr: +stdout: stderr: stdout: ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: $PREPARSER ./test stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stdout: +stdout: stderr: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: +./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $PREPARSER ./test stderr: stderr: stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stderr: -stderr: -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -stderr: -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $PREPARSER ./test +stderr: +stderr: stderr: stdout: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test 
test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stderr: stdout: -./types.at:139: $PREPARSER ./test ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ======== Testing with C++ standard flags: '' +stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +stderr: +./types.at:139: $PREPARSER ./test +stdout: ./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: ./check -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test -stderr: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ 
standard flags: '' +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: +./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test +stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +stderr: +stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' stderr: +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: ./check +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test 
+stderr: +stdout: +./types.at:139: ./check +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: +./headers.at:331: echo "xc" >>expout stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: +./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xd.cc xd.y +======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: +stderr: +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stderr: ======== Testing with C++ standard flags: '' +stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./headers.at:331: echo "xc" >>expout -./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xd.cc xd.y +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr]
 input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
- First example . c A A $end
- First reduce derivation $accept -> [ a -> [ b -> [ . ] d -> [ c A A ] ] $end ]
- Second example . c A A $end
- Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ]
+ First example: . c A A $end
+ First reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 1: b d
+ `-> 3: %empty . `-> 6: c A A
+ Second example: . c A A $end
+ Second reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 2: c d
+ `-> 4: %empty . `-> 6: c A A
 input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
 time limit exceeded: 6.000000
- First example b . c A A $end
- First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ]
- Second example b . A $end
- Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ]
+ First example: b . c A A $end
+ First reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 1: b d
+ `-> 5: a
+ `-> 1: b d
+ `-> 3: %empty . `-> 6: c A A
+ Second example: b . A $end
+ Second reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 1: b d
+ `-> 6: c A
+ `-> 4: %empty .
 input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
-time limit exceeded: 6.000000
- First example c . c A A $end
- First reduce derivation $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ]
- Second example c . A $end
- Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ]
+time limit exceeded: 7.000000
+ First example: c . c A A $end
+ First reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 2: c d
+ `-> 5: a
+ `-> 1: b d
+ `-> 3: %empty . `-> 6: c A A
+ Second example: c . A $end
+ Second reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 2: c d
+ `-> 6: c A
+ `-> 4: %empty .
 input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
 time limit exceeded: 6.000000
- First example b c . A
- Shift derivation a -> [ b d -> [ c . A ] ]
- Second example b c . c A A $end
- Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ]
+ First example: b c . A
+ Shift derivation
+ a
+ `-> 1: b d
+ `-> 6: c . A
+ Second example: b c . c A A $end
+ Reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 1: b d
+ `-> 5: a
+ `-> 2: c d
+ `-> 5: a
+ `-> 1: b d
+ `-> 3: %empty . `-> 6: c A A
 input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
- First example b c . c A A $end
- First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ]
- Second example b c . A $end
- Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ]
+ First example: b c . c A A $end
+ First reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 1: b d
+ `-> 5: a
+ `-> 2: c d
+ `-> 5: a
+ `-> 1: b d
+ `-> 3: %empty . `-> 6: c A A
+ Second example: b c . A $end
+ Second reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 1: b d
+ `-> 5: a
+ `-> 2: c d
+ `-> 6: c A
+ `-> 4: %empty .
 input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
- First example b c . A
- Shift derivation a -> [ b d -> [ c . A ] ]
- Second example b c . A $end
- Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ]
+ First example: b c . A
+ Shift derivation
+ a
+ `-> 1: b d
+ `-> 6: c . A
+ Second example: b c . A $end
+ Reduce derivation
+ $accept
+ `-> 0: a $end
+ `-> 1: b d
+ `-> 5: a
+ `-> 2: c d
+ `-> 6: c A
+ `-> 4: %empty .
 input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
- Example b d .
- First reduce derivation a -> [ b d . ]
- Second reduce derivation a -> [ b d -> [ d . ] ]
+ Example: b d .
+ First reduce derivation
+ a
+ `-> 1: b d .
+ Second reduce derivation
+ a
+ `-> 1: b d
+ `-> 7: d .
 input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
- Example c d .
- First reduce derivation a -> [ c d . ]
- Second reduce derivation a -> [ c d -> [ d . ] ]
+ Example: c d .
+ First reduce derivation
+ a
+ `-> 2: c d .
+ Second reduce derivation
+ a
+ `-> 2: c d
+ `-> 7: d .
 input.y:5.4: warning: rule useless in parser due to conflicts [-Wother]
 input.y:6.15: warning: rule useless in parser due to conflicts [-Wother]
+./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
 stdout:
-./types.at:139: ./check
-./types.at:139: ./check
+./types.at:139: $PREPARSER ./test
 stderr:
 stdout:
 ./types.at:139: $PREPARSER ./test
 stderr:
 stdout:
 ./types.at:139: $PREPARSER ./test
-./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
-./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y
-stderr:
-./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y
-270. counterexample.at:610: ok
 stderr:
 stdout:
 ./types.at:139: $PREPARSER ./test
 stderr:
 stdout:
 ./types.at:139: $PREPARSER ./test
-
-437. types.at:139: testing glr.cc api.value.type=union ...
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./headers.at:332: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xd.o xd.cc -stderr: ======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' stderr: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' stderr: -stdout: stderr: -./types.at:139: $PREPARSER ./test -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: +./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stderr: -./types.at:139: $PREPARSER ./test ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: @@ -14094,540 +14180,467 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stderr: -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stdout: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' +stderr: +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -stderr: -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./types.at:139: $PREPARSER ./test -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check stderr: stdout: ./types.at:139: ./check ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' +stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' +stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed 
>&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: +======== Testing with C++ standard flags: '' +stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' +stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./types.at:139: $PREPARSER ./test +stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test +stderr: stdout: +./types.at:139: $PREPARSER ./test stderr: +stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +401. types.at:139: ok + +439. types.at:139: testing glr2.cc api.value.type={double} ... +======== Testing with C++ standard flags: '' stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +403. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +440. types.at:139: testing glr2.cc api.value.type={double} %header ... 
+======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: $PREPARSER ./test stdout: +./types.at:139: $PREPARSER ./test +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -409. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test - stderr: +stdout: +./types.at:139: ./check stderr: stdout: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: $PREPARSER ./test +stdout: stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stdout: -438. types.at:139: testing glr.cc api.value.type=union %header ... +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -419. 
types.at:139: ok -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -421. types.at:139: ok -407. types.at:139: ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +405. types.at:139: ok 399. types.at:139: ok - - -stderr: -stdout: -439. types.at:139: testing glr2.cc api.value.type={double} ... ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + -440. types.at:139: testing glr2.cc api.value.type={double} %header ... -======== Testing with C++ standard flags: '' -stderr: 441. types.at:139: testing glr2.cc api.value.type={variant} ... ======== Testing with C++ standard flags: '' +442. types.at:139: testing glr2.cc api.value.type={variant} %header ... +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -423. types.at:139: ok -442. types.at:139: testing glr2.cc api.value.type={variant} %header ... +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -======== Testing with C++ standard flags: '' stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -443. types.at:139: testing glr2.cc api.value.type={struct foo} ... 
-======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: ./check stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +./types.at:139: $PREPARSER ./test stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +409. types.at:139: ======== Testing with C++ standard flags: '' + ok +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +422. 
types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -406. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + 444. types.at:139: testing glr2.cc api.value.type={struct foo} %header ... ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +443. types.at:139: testing glr2.cc api.value.type={struct foo} ... +======== Testing with C++ standard flags: '' stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -401. types.at:139: ok +stdout: +./types.at:139: $PREPARSER ./test stderr: stdout: - +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stderr: +419. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + 445. types.at:139: testing glr2.cc api.value.type={struct bar} ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -422. types.at:139: ok -stdout: -./types.at:139: ./check stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - stdout: +400. types.at:139: ok +411. types.at:139: ok ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ... -======== Testing with C++ standard flags: '' -408. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + stderr: -stdout: -./types.at:139: ./check -stderr: -stdout: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test -447. types.at:139: testing glr2.cc api.value.type={union foo} ... +446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ... ======== Testing with C++ standard flags: '' -stderr: +407. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -403. types.at:139: ok +447. types.at:139: testing glr2.cc api.value.type={union foo} ... 
+======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test 448. types.at:139: testing glr2.cc api.value.type={union foo} %header ... ======== Testing with C++ standard flags: '' -stderr: -stdout: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $PREPARSER ./test -stderr: -stdout: -./types.at:139: ./check -stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -405. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -402. types.at:139: ok -stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: -stdout: +402. 
types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ... ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -411. types.at:139: ok -stderr: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +424. types.at:139: 404. types.at:139: ok + ok + stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./headers.at:332: echo "xd" >>expout +450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ... ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 451. types.at:139: testing glr2.cc api.value.type=union ... ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: stderr: stdout: ./types.at:139: ./check -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -404. types.at:139: ok - -452. types.at:139: testing glr2.cc api.value.type=union %header ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check stderr: stdout: -./headers.at:332: echo "xd" >>expout -412. types.at:139: ok +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./headers.at:342: "$PERL" -n -0777 -e ' # Ignore comments. s{/\*.*?\*/}{}gs; @@ -14671,337 +14684,318 @@ } } ' -- *.hh *.h - -stderr: -stdout: -./headers.at:387: $CC $CFLAGS $CPPFLAGS -c -o c-only.o c-only.c -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -453. types.at:377: testing lalr1.cc: Named %union ... 
-./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: stderr: -./headers.at:387: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx-only.o cxx-only.cc stdout: -453. types.at:377: ok +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: - -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -400. types.at:139: ok stderr: stdout: -./headers.at:387: $CXX $CXXFLAGS $CPPFLAGS $LDFLAGS c-only.o cxx-only.o -o c-and-cxx || - exit 77 -454. types.at:377: testing glr.cc: Named %union ... -./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - +./types.at:139: $PREPARSER ./test stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test -455. scanner.at:326: testing Token numbers: yacc.c ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -454. types.at:377: ok -stdout: -./headers.at:387: $PREPARSER ./c-and-cxx stderr: -./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - -stdout: -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./types.at:139: $PREPARSER ./test -./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +408. types.at:139: ok +421. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + +452. types.at:139: testing glr2.cc api.value.type=union %header ... 
======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +453. types.at:377: testing lalr1.cc: Named %union ... +./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -433. types.at:139: ok - -457. scanner.at:326: testing Token numbers: glr.c ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: stdout: -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./types.at:139: $PREPARSER ./test stderr: +stdout: +./types.at:139: ./check +453. types.at:377: ok + +454. types.at:377: testing glr.cc: Named %union ... +./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./headers.at:387: $CC $CFLAGS $CPPFLAGS -c -o c-only.o c-only.c +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: stderr: -./types.at:139: $PREPARSER ./test +stderr: +412. types.at:139: ok +423. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + +455. scanner.at:326: testing Token numbers: yacc.c ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +454. types.at:377: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ... +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +418. types.at:139: ok 417. types.at:139: ok -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -410. types.at:139: ok + + +457. 
scanner.at:326: testing Token numbers: glr.c ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +459. scanner.at:326: testing Token numbers: lalr1.cc ... 458. scanner.at:326: testing Token numbers: glr.c api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - stderr: -stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c stderr: stdout: -459. scanner.at:326: testing Token numbers: lalr1.cc ... -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -./scanner.at:326: $PREPARSER ./input -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +./types.at:139: ./check +./types.at:139: ./check stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: -./scanner.at:326: $PREPARSER ./input stdout: +./types.at:139: $PREPARSER ./test +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stdout: +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: stderr: +stdout: +./types.at:139: $PREPARSER ./test +./headers.at:387: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx-only.o cxx-only.cc +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +410. types.at:139: 406. types.at:139: ok + ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -455. scanner.at:326: ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -420. types.at:139: ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +431. types.at:139: ok -456. scanner.at:326: ok + +461. 
scanner.at:326: testing Token numbers: glr.cc ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y 460. scanner.at:326: testing Token numbers: lalr1.cc api.token.raw ... -424. types.at:139: ok ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: - stdout: - -./types.at:139: $PREPARSER ./test - -stderr: -461. scanner.at:326: testing Token numbers: glr.cc ... 462. scanner.at:326: testing Token numbers: glr.cc api.token.raw ... -463. scanner.at:326: testing Token numbers: glr2.cc ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -418. types.at:139: ok stdout: -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -stderr: +stdout: +./headers.at:387: $CXX $CXXFLAGS $CPPFLAGS $LDFLAGS c-only.o cxx-only.o -o c-and-cxx || + exit 77 +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: +./headers.at:387: $PREPARSER ./c-and-cxx +stderr: +./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +429. types.at:139: ok + +463. scanner.at:326: testing Token numbers: glr2.cc ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./types.at:139: $PREPARSER ./test -./headers.at:394: $PREPARSER ./parser -======== Testing with C++ standard flags: '' ./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +433. 
types.at:139: ok stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c stderr: - +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +464. scanner.at:326: testing Token numbers: glr2.cc api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./headers.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -464. scanner.at:326: testing Token numbers: glr2.cc api.token.raw ... -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./scanner.at:326: $PREPARSER ./input ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./scanner.at:326: $PREPARSER ./input +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stderr: +stderr: +420. types.at:139: ok +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -310. headers.at:199: ok 465. scanner.at:326: testing Token numbers: lalr1.d ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -428. types.at:139: ok +455. scanner.at:326: 413. types.at:139: ok + ok +456. scanner.at:326: ok -./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -466. scanner.at:326: testing Token numbers: lalr1.d api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -465. scanner.at:326: stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c - skipped (scanner.at:326) -466. scanner.at:326: ./scanner.at:326: $PREPARSER ./input - skipped (scanner.at:326) stderr: stdout: ./types.at:139: $PREPARSER ./test -467. scanner.at:326: testing Token numbers: lalr1.java ... 
-stderr: -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y stderr: +stdout: +./types.at:139: $PREPARSER ./test +466. scanner.at:326: testing Token numbers: lalr1.d api.token.raw ... +467. scanner.at:326: testing Token numbers: lalr1.java ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y 468. scanner.at:326: testing Token numbers: lalr1.java api.token.raw ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -467. scanner.at:326: 457. scanner.at:326: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +466. scanner.at:326: 468. scanner.at:326: ======== Testing with C++ standard flags: '' +465. scanner.at:326: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + skipped (scanner.at:326) + skipped (scanner.at:326) + skipped (scanner.at:326) +467. scanner.at:326: 432. types.at:139: ok skipped (scanner.at:326) + + + 469. scanner.at:330: testing Token numbers: lalr1.cc api.token.raw api.value.type=variant api.token.constructor ... ./scanner.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y 470. calc.at:1334: testing Calculator parse.trace ... -468. scanner.at:326: ./calc.at:1334: mv calc.y.tmp calc.y - - skipped (scanner.at:326) -./calc.at:1334: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1334: mv calc.y.tmp calc.y -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -stdout: -./scanner.at:330: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 471. calc.at:1336: testing Calculator %header ... +472. calc.at:1337: testing Calculator %debug %locations ... ./calc.at:1336: mv calc.y.tmp calc.y -./calc.at:1334: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -======== Testing with C++ standard flags: '' -414. types.at:139: ok -./calc.at:1336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -472. calc.at:1337: testing Calculator %debug %locations ... 
-./calc.at:1336: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1334: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1337: mv calc.y.tmp calc.y -stderr: ./calc.at:1337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./calc.at:1336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +473. calc.at:1338: testing Calculator %locations api.location.type={Span} ... +./calc.at:1338: mv calc.y.tmp calc.y + +./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./scanner.at:326: $PREPARSER ./input -stderr: +./scanner.at:330: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -458. scanner.at:326: ok -425. types.at:139: ok - - +stderr: +stdout: ./calc.at:1337: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -473. calc.at:1338: testing Calculator %locations api.location.type={Span} ... -./calc.at:1338: mv calc.y.tmp calc.y - +./calc.at:1338: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1336: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +425. types.at:139: ok stderr: + stdout: -./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./types.at:139: ./check +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c 474. calc.at:1340: testing Calculator %name-prefix "calc" ... ./calc.at:1340: mv calc.y.tmp calc.y ./calc.at:1340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1334: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./scanner.at:326: $PREPARSER ./input +./scanner.at:326: $PREPARSER ./input stderr: stdout: ./types.at:139: $PREPARSER ./test +./calc.at:1340: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -429. types.at:139: ok -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: -stdout: stderr: -./types.at:139: $PREPARSER ./test +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1338: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +430. 
types.at:139: ok +457. scanner.at:326: ok +458. scanner.at:326: ok -434. types.at:139: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -./calc.at:1340: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -stderr: -stdout: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -431. types.at:139: ok -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + +477. calc.at:1343: testing Calculator parse.error=detailed ... +./calc.at:1343: mv calc.y.tmp calc.y 475. calc.at:1341: testing Calculator %verbose ... ./calc.at:1341: mv calc.y.tmp calc.y -./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: +./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y 476. calc.at:1342: testing Calculator %yacc ... ./calc.at:1342: if "$POSIXLY_CORRECT_IS_EXPORTED"; then @@ -15011,86 +15005,50 @@ fi -stdout: -./types.at:139: $PREPARSER ./test +./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -477. calc.at:1343: testing Calculator parse.error=detailed ... -./calc.at:1343: mv calc.y.tmp calc.y - -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr --std=c++03 not supported -======== Testing with C++ standard flags: '' -./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -427. types.at:139: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - ok -stderr: stdout: ./types.at:139: $PREPARSER ./test +./calc.at:1343: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1342: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./calc.at:1341: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: - ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./types.at:139: $PREPARSER ./test -413. types.at:139: ok -stderr: -stderr: -./calc.at:1343: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1342: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - -426. types.at:139: ok -478. calc.at:1344: testing Calculator parse.error=verbose ... 
-./calc.at:1344: mv calc.y.tmp calc.y - -./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - -479. calc.at:1346: testing Calculator api.pure=full %locations ... -./calc.at:1346: mv calc.y.tmp calc.y +./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1338: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c -./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +427. types.at:139: ok stderr: -stdout: -./types.at:139: ./check stderr: -480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations ... -./calc.at:1347: mv calc.y.tmp calc.y - -./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stdout: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -432. types.at:139: ok stderr: stdout: -./types.at:139: ./check -./calc.at:1347: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS --std=c++03 not supported -======== Testing with C++ standard flags: '' -./calc.at:1346: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -481. calc.at:1348: testing Calculator parse.error=detailed %locations ... -./calc.at:1348: mv calc.y.tmp calc.y - -./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1340: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: stdout: -./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +478. calc.at:1344: testing Calculator parse.error=verbose ... +./calc.at:1337: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1344: mv calc.y.tmp calc.y + +./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -./calc.at:1338: "$PERL" -ne ' +./calc.at:1337: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -15101,24 +15059,7 @@ || /\t/ )' calc.c -stdout: -./calc.at:1336: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1336: "$PERL" -ne ' +./calc.at:1340: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -15127,12 +15068,11 @@ || /\s$/ # No tabs. || /\t/ - )' calc.c calc.h + )' calc.c -stderr: +./scanner.at:326: $PREPARSER ./input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -15146,60 +15086,32 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1336: $PREPARSER ./calc input -stderr: +./calc.at:1338: $PREPARSER ./calc input stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stdout: +./types.at:139: $PREPARSER ./test stderr: -./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc stderr: - | 1 2 -./calc.at:1338: $PREPARSER ./calc input stdout: -./types.at:139: ./check +./headers.at:394: $PREPARSER ./parser stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -1.3: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 2 +./calc.at:1344: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stdout: +./calc.at:1334: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: -./calc.at:1336: $PREPARSER ./calc input -1.3: syntax error stderr: stderr: -syntax error -stdout: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1342: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -syntax error +./scanner.at:326: $PREPARSER ./input +428. types.at:139: ok +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -stdout: +input: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -15213,88 +15125,7 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1338: cat stderr -stderr: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./scanner.at:326: $PREPARSER ./input -input: - | 1//2 -stderr: -./calc.at:1338: $PREPARSER ./calc input -input: -stderr: - | 1 2 -./calc.at:1342: $PREPARSER ./calc input -1.3: syntax error -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -459. scanner.at:326: 1.3: syntax error - ok -stderr: -syntax error -input: -stdout: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1340: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: -stderr: -stderr: -stdout: -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -syntax error - -stderr: -stdout: -stderr: -syntax error -./calc.at:1337: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./scanner.at:326: $PREPARSER ./input -input: -./calc.at:1338: cat stderr -stderr: +./calc.at:1340: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -15308,77 +15139,19 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1340: $PREPARSER ./calc input -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1338: $PREPARSER ./calc input -stderr: -460. scanner.at:326: ok -./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -1.1: syntax error -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1336: cat stderr -482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc ... 
-./calc.at:1350: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi +./calc.at:1337: $PREPARSER ./calc input +479. calc.at:1346: testing Calculator api.pure=full %locations ... +./calc.at:1346: mv calc.y.tmp calc.y stderr: -1.1: syntax error - | 1 2 -./calc.at:1340: $PREPARSER ./calc input -stderr: -syntax error -input: -./calc.at:1342: cat stderr - - | error -./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: "$PERL" -ne ' +./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1334: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y (eof || $. == 1) && /^\s*$/ # No trailing space. || /\s$/ @@ -15386,41 +15159,8 @@ || /\t/ )' calc.c +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -input: -stderr: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -stderr: -./calc.at:1342: $PREPARSER ./calc input -syntax error -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1337: $PREPARSER ./calc input -stderr: -stderr: -stdout: -syntax error -./calc.at:1338: cat stderr -./calc.at:1334: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: Starting parse Entering state 0 Stack now 0 @@ -16439,43 +16179,12 @@ Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1343: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -syntax error -input: +stdout: +./calc.at:1342: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +stderr: stderr: - | 1 = 2 = 3 Starting parse Entering state 0 Stack now 0 @@ -16793,8 +16502,7 @@ Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -Ent483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc ... -ering state 25 +Entering state 25 Stack now 0 6 8 25 Reducing stack by rule 4 (line 84): $1 = nterm exp (4.1-9: -1) @@ -17494,9 +17202,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1343: "$PERL" -ne ' +stderr: +./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1342: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -17507,21 +17215,12 @@ || /\t/ )' calc.c -./calc.at:1351: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -stderr: -./calc.at:1336: cat stderr -stderr: -input: +./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +310. headers.at:199: ok input: -stdout: -./calc.at:1340: cat stderr +stderr: +459. scanner.at:326: ok | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -17535,51 +17234,54 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 - | 1 2 -./calc.at:1337: $PREPARSER ./calc input -1.7: syntax error -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ./calc.at:1334: $PREPARSER ./calc input -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1336: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' + + +480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations ... +./calc.at:1347: mv calc.y.tmp calc.y + +481. calc.at:1348: testing Calculator parse.error=detailed %locations ... 
+./calc.at:1348: mv calc.y.tmp calc.y + +./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -input: -input: -./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +stderr: +stderr: +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1346: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1336: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +stderr: +stderr: +stderr: +./scanner.at:326: $PREPARSER ./input +460. scanner.at:326: input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error - | 1 = 2 = 3 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -17593,43 +17295,8 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1343: $PREPARSER ./calc input - | 1//2 -./calc.at:1340: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./calc.at:1336: $PREPARSER ./calc input -./scanner.at:326: $PREPARSER ./input -stderr: -./calc.at:1342: cat stderr -syntax error +./calc.at:1342: $PREPARSER ./calc input stderr: -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -18648,34 +18315,58 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - my @exps = $2 =~ 
/\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: +./types.at:139: $PREPARSER ./test stderr: stderr: -syntax error + +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test stderr: +stdout: +./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc ... +./calc.at:1350: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: +./types.at:139: $PREPARSER ./test +input: +input: +./calc.at:1348: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | 1 2 stderr: stderr: +./calc.at:1338: $PREPARSER ./calc input stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +stderr: +stderr: + | 1 2 +stderr: +./calc.at:1340: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1341: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + Starting parse Entering state 0 Stack now 0 @@ -19693,61 +19384,13 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -stderr: -stderr: -./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | error -syntax error -stdout: -./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: -stdout: -./calc.at:1341: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. -./calc.at:1342: $PREPARSER ./calc input -stdout: -./calc.at:1344: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -stderr: - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -462. 
scanner.at:326: syntax error - ok -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: cat stderr -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -input: -input: - | 1 2 -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1337: cat stderr -stderr: input: -stderr: - | 1 2 -./calc.at:1334: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected number +426. types.at:139: ok +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -19761,13 +19404,28 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: $PREPARSER ./calc input input: -./calc.at:1350: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1347: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +stderr: + | 1 2 +stderr: +stderr: +./calc.at:1337: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +1.3: syntax error +======== Testing with C++ standard flags: '' syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +434. types.at:139: ok +435. types.at:139: ok +414. types.at:139: ok +462. scanner.at:326: ok input: -./calc.at:1344: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -19781,56 +19439,34 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 - | 1//2 -./calc.at:1337: $PREPARSER ./calc input ./calc.at:1341: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -input: stderr: -syntax error +stdout: +./calc.at:1343: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: + +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.1: 2) -Stack now 0 -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected number - | - | +1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1343: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + Starting parse Entering state 0 Stack now 0 @@ -19845,37 +19481,100 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) +Next token is token "number" (1.3: 2) 1.3: syntax error -Error: popping token '/' (1.2: ) -Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token "number" (1.3: 2) Stack now 0 +./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./scanner.at:326: $PREPARSER ./input +483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc ... +./calc.at:1351: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +484. calc.at:1353: testing Calculator %debug ... +./calc.at:1353: mv calc.y.tmp calc.y + +485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... +./calc.at:1354: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... 
+./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1355: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... +./calc.at:1357: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +input: + | 1 2 +./calc.at:1334: $PREPARSER ./calc input stderr: -2.1: syntax error stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1343: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +input: +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1 2 + | 1 2 +stderr: +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1336: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -19896,171 +19595,10 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -stderr: -./calc.at:1336: cat stderr -./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o 
check check.cc $LIBS -stderr: -./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -2.1: syntax error -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1343: cat stderr -input: -./calc.at:1351: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -input: -./calc.at:1342: cat stderr - | - | +1 -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 2 -./calc.at:1340: cat stderr -./calc.at:1341: $PREPARSER ./calc input - | 1 2 -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1336: $PREPARSER ./calc input -input: -input: -stderr: -./calc.at:1337: cat stderr - | 1//2 -./calc.at:1338: cat stderr -./calc.at:1343: $PREPARSER ./calc input -syntax error - | 1 = 2 = 3 -input: -./calc.at:1342: $PREPARSER ./calc input -484. calc.at:1353: testing Calculator %debug ... -./calc.at:1353: mv calc.y.tmp calc.y - -stderr: -stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: $PREPARSER ./calc /dev/null -syntax error, unexpected number - | error -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -stderr: -stderr: -stderr: -stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-syntax error -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -stderr: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1334: cat stderr -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected number - | error -stderr: -stderr: -syntax error -stdout: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -input: -stderr: - | 1//2 -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1346: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -1.1: syntax error -stderr: -./calc.at:1337: $PREPARSER ./calc input -stderr: -syntax error -stderr: -./calc.at:1346: "$PERL" -ne ' +./calc.at:1344: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. -syntax error (eof || $. == 1) && /^\s*$/ # No trailing space. || /\s$/ @@ -20068,6 +19606,8 @@ || /\t/ )' calc.c +stderr: +461. scanner.at:326: ok Starting parse Entering state 0 Stack now 0 @@ -20076,48 +19616,45 @@ Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 81): +Reducing stack by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) -Stack now 0 8 +Next token is token "number" (1.3: 2) +1.3: syntax error Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.1: ) +Cleanup: discarding lookahead token "number" (1.3: 2) Stack now 0 + stderr: +stdout: +./types.at:139: $PREPARSER ./test +488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... 
+./calc.at:1358: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1357: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1355: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1353: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1344: "$PERL" -pi -e 'use strict; +stderr: +stderr: + | 1 2 +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20127,33 +19664,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.1: ) +./calc.at:1341: $PREPARSER ./calc input +stderr: syntax error -Error: popping token '/' (1.1: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.1: ) -Stack now 0 +syntax error +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -20168,38 +19684,8 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1346: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20210,31 +19696,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1343: cat stderr -./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1344: cat stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20244,88 +19708,77 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: cat stderr -./calc.at:1341: cat stderr -input: -./calc.at:1342: cat stderr -input: -input: -./calc.at:1336: cat stderr -./calc.at:1340: cat stderr - | error - | 1 2 - | 1//2 -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1346: $PREPARSER ./calc input -stderr: -input: -input: -stderr: -./calc.at:1336: $PREPARSER ./calc /dev/null - | 1 = 2 = 3 -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1334: cat stderr -stderr: -input: - | 1//2 -syntax error, unexpected invalid token -./calc.at:1341: $PREPARSER ./calc input stderr: -stdout: stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' syntax error -stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -stderr: -stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.1: 2) syntax error -stderr: +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.1: 2) +Stack now 0 +./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr --std=c++98 not supported -======== Testing with C++ standard flags: '' stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -1.3: syntax error -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: stderr: -syntax error stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stdout: stderr: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' syntax error -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +stdout: syntax error +./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1347: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -syntax error, unexpected invalid token -syntax error +stderr: +stdout: +./calc.at:1346: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' input: - | error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1338: cat stderr +./calc.at:1358: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS + | 1 2 +./calc.at:1343: $PREPARSER ./calc input stderr: -1.3: syntax error -./calc.at:1334: $PREPARSER ./calc input +./calc.at:1346: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +stderr: +./calc.at:1348: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: ./calc.at:1347: "$PERL" -ne ' chomp; @@ -20338,43 +19791,20 @@ || /\t/ )' calc.c -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +stdout: +stdout: syntax error +./types.at:139: ./check +./types.at:139: ./check stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1346: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1340: cat stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20385,17 +19815,51 @@ }eg ' expout || exit 77 input: -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1337: cat stderr +-std=c++98 not supported + | 1//2 +======== Testing with C++ standard flags: '' +./calc.at:1338: $PREPARSER ./calc input +stderr: +stderr: +stderr: +-std=c++98 not supported +======== Testing with C++ standard flags: '' +syntax error, unexpected number +input: +input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1348: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1346: $PREPARSER ./calc input +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -20410,30 +19874,9 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1347: $PREPARSER ./calc input -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -./calc.at:1344: cat stderr - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; +input: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20443,7 +19886,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20454,47 +19897,19 @@ }eg ' expout || exit 77 stderr: + | 1 2 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -input: -stdout: - | error -./calc.at:1346: cat stderr -./calc.at:1337: cat stderr stderr: -./types.at:139: ./check ./calc.at:1344: $PREPARSER ./calc input -./calc.at:1336: cat stderr -./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1338: cat stderr --std=c++98 not supported -======== Testing with C++ standard flags: '' -./calc.at:1343: cat stderr -./calc.at:1340: cat stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1341: cat stderr stderr: +======== Testing with C++ standard flags: '' +1.3: syntax error +======== Testing with C++ standard flags: '' +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1//2 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20505,79 +19920,33 @@ }eg ' expout || exit 77 input: -syntax error, unexpected invalid token - | 1 2 -./calc.at:1347: $PREPARSER ./calc input -input: -input: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -input: -./calc.at:1336: $PREPARSER ./calc input -input: -stderr: - | (!!) 
+ (1 2) = 1 -1.3: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: $PREPARSER ./calc input +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 + | 1//2 +./calc.at:1337: $PREPARSER ./calc input stderr: - | error -./calc.at:1341: $PREPARSER ./calc input -1.3: syntax error - | 1 = 2 = 3 -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: $PREPARSER ./calc input +./calc.at:1340: $PREPARSER ./calc input stderr: stderr: stderr: stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -1.3: syntax error +syntax error, unexpected number +./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error, unexpected number +./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: stdout: -./calc.at:1342: cat stderr -stderr: -input: -syntax error -input: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -1.11: syntax error -1.1-16: error: 2222 != 1 -stderr: -stderr: - | 1 = 2 = 3 - | - | +1 -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1340: $PREPARSER ./calc input -syntax error, unexpected '=' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./types.at:139: $PREPARSER ./test +./calc.at:1334: cat stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error stderr: stderr: stderr: -./calc.at:1334: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected invalid token syntax error -./calc.at:1342: $PREPARSER ./calc /dev/null -./scanner.at:326: $PREPARSER ./input -stderr: Starting parse Entering state 0 Stack now 0 @@ -20592,54 +19961,64 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error +Error: popping token '/' (1.2: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./types.at:139: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr stderr: stdout: -syntax error, unexpected '=' +./types.at:139: ./check +stderr: +stderr: +stderr: +stdout: +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc stderr: +stdout: ./types.at:139: $PREPARSER ./test input: +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: cat stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1 2 + | 1 2 +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc input stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -syntax error -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1346: $PREPARSER ./calc input +stderr: +./scanner.at:330: $PREPARSER ./input +stderr: +syntax error, unexpected number +./calc.at:1336: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20649,11 +20028,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -syntax error + | 1//2 stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.3: syntax error +======== Testing with C++ standard flags: '' +stderr: +1.3: syntax error, unexpected number +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +syntax error +======== Testing with C++ standard flags: '' Starting parse Entering state 0 Stack now 0 @@ -20668,36 +20058,24 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error +Error: popping token '/' (1.2: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding 
lookahead token '/' (1.3: ) Stack now 0 - | 1 = 2 = 3 -./calc.at:1334: $PREPARSER ./calc input -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; +1.3: syntax error +./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20707,8 +20085,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error +input: +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 + | 1//2 stderr: +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -20723,36 +20107,23 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.1: ) +Next token is token '/' (1.1: ) syntax error -Error: popping nterm exp (1.1: 2) -Stack now 0 8 19 -Error: popping token '=' (1.1: ) +Error: popping token '/' (1.1: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.1: ) +Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 +469. scanner.at:330: ok + +input: ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20762,27 +20133,23 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -' expout || exit 77 - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -461. scanner.at:326: ok - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg ' expout || exit 77 -./calc.at:1347: cat stderr -./calc.at:1346: cat stderr -430. 
types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1343: cat stderr +stderr: +stderr: + | 1//2 +./calc.at:1341: $PREPARSER ./calc input +stderr: +syntax error +syntax error +stderr: +stderr: +1.3: syntax error, unexpected number +1.3: syntax error +1.3: syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20793,29 +20160,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1344: cat stderr -input: +stderr: +syntax error +489. calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... Starting parse Entering state 0 Stack now 0 @@ -20830,126 +20182,46 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.1: ) +Next token is token '/' (1.1: ) syntax error -Error: popping nterm exp (1.1: 2) -Stack now 0 8 19 -Error: popping token '=' (1.1: ) +Error: popping token '/' (1.1: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.1: ) +Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 -stderr: - | 1//2 -stdout: -./calc.at:1347: $PREPARSER ./calc input -input: - | error -./calc.at:1336: cat stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1338: cat stderr -./types.at:139: ./check -./calc.at:1341: cat stderr -stderr: --std=c++03 not supported -======== Testing with C++ standard flags: '' -./calc.at:1346: $PREPARSER ./calc input +./calc.at:1360: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi -input: -1.3: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -input: -./calc.at:1337: cat stderr -./calc.at:1343: cat stderr - | 1 = 2 = 3 - | (!!) + (1 2) = 1 - | (- *) + (1 2) = 1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -1.1: syntax error -input: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check stderr: +stdout: +./types.at:139: ./check stderr: - | 1 = 2 = 3 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -syntax error -error: 2222 != 1 -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error +stdout: +./types.at:139: ./check stderr: -1.1: syntax error -input: -./calc.at:1340: cat stderr - | - | +1 +stdout: +./types.at:139: $PREPARSER ./test stderr: +stdout: +./types.at:139: $PREPARSER ./test input: -./calc.at:1342: cat stderr -stderr: - | - | +1 -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1340: $PREPARSER ./calc /dev/null -syntax error, unexpected '=' -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -syntax error -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -syntax error -error: 2222 != 1 -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1338: cat stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20959,36 +20231,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 ./calc.at:1341: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: cat stderr stderr: + | 1//2 ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20998,48 +20244,9 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1343: $PREPARSER ./calc input ' expout || exit 77 -syntax error, unexpected '+' -./calc.at:1334: cat stderr -input: -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: -stdout: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1342: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -stderr: -./types.at:139: ./check -syntax error, unexpected '=' -stderr: -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -21049,29 +20256,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... -syntax error, unexpected '+' -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -syntax error -syntax error -input: -./calc.at:1347: cat stderr +stdout: +-std=c++98 not supported +======== Testing with C++ standard flags: '' -std=c++03 not supported ======== Testing with C++ standard flags: '' -./calc.at:1354: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1337: "$PERL" -pi -e 'use strict; +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -21081,241 +20273,71 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... -./calc.at:1355: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1346: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | - | +1 -./calc.at:1334: $PREPARSER ./calc input -input: stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 - | error -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (1.1: ) -Stack now 0 -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -input: -./calc.at:1338: cat stderr - | 1 = 2 = 3 -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1343: cat stderr +./types.at:139: ./check +syntax error +stderr: stdout: +syntax error ./types.at:139: $PREPARSER ./test -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1337: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (1.1: ) -Stack now 0 -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -1.7: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: +stdout: +-std=c++03 not supported +======== Testing with C++ standard flags: '' stderr: +./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: -./calc.at:1343: $PREPARSER ./calc /dev/null -1.1: syntax error -./calc.at:1341: cat stderr +./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +syntax error, unexpected '/', expecting number or '-' or '(' or '!' stderr: +syntax error ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1337: $PREPARSER ./calc /dev/null -1.7: syntax error -./calc.at:1340: cat stderr -input: input: +./calc.at:1337: cat stderr input: -syntax error, unexpected end of file -stderr: - | (- *) + (1 2) = 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 - | - | +1 -input: -./calc.at:1336: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1338: $PREPARSER ./calc input - | - | +1 -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1344: $PREPARSER ./calc input -stderr: ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '+' - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1340: $PREPARSER ./calc input -syntax error -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: cat stderr + | 1//2 + | error +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1350: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1351: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + ======== Testing with C++ standard flags: '' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +======== Testing with C++ standard flags: '' +416. types.at:139: ok stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1346: cat stderr +./calc.at:1353: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -21325,130 +20347,82 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: cat stderr +./calc.at:1347: cat stderr +./calc.at:1340: cat stderr +./calc.at:1360: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stderr: stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 1.1: syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -stdout: -syntax error, unexpected '+' -syntax error, unexpected end of file -stderr: -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -syntax error -syntax error -error: 2222 != 1 -syntax error -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./calc.at:1334: cat stderr -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error +syntax error, unexpected '/', expecting number or '-' or '(' or '!' + +syntax error, unexpected '/', expecting number or '-' or '(' or '!' input: - | (!!) 
+ (1 2) = 1 -./scanner.at:330: $PREPARSER ./input -stderr: -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: $PREPARSER ./calc /dev/null -./calc.at:1346: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1350: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1351: $PREPARSER ./calc input stderr: +stdout: +./types.at:139: ./check stderr: -syntax error -error: 2222 != 1 -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: cat stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: +490. calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1362: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +stdout: stdout: -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 +./types.at:139: ./check +./types.at:139: ./check stdout: -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stdout: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg -syntax error -error: 2222 != 1 ' expout || exit 77 -input: -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -21458,23 +20432,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1346: $PREPARSER ./calc input -./types.at:139: ./check -stderr: -./calc.at:1337: cat stderr -469. scanner.at:330: ok input: - | 1 = 2 = 3 -2.1: syntax error +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: ./check +-std=c++98 not supported +-std=c++03 not supported +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' + | error + | 1//2 +./types.at:139: ./check +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc input stderr: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1348: "$PERL" -ne ' +./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1353: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr (eof || $. == 1) && /^\s*$/ # No trailing space. || /\s$/ @@ -21482,24 +20461,9 @@ || /\t/ )' calc.c -./calc.at:1343: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -stderr: -./calc.at:1354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1341: cat stderr -./calc.at:1336: cat stderr -stderr: -2.1: syntax error -./calc.at:1344: cat stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +input: +input: +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -21509,3794 +20473,465 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: cat stderr -input: -1.7: syntax error input: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1341: $PREPARSER ./calc /dev/null - - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1//2 stderr: -./calc.at:1343: $PREPARSER ./calc input + | error +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +./calc.at:1340: $PREPARSER ./calc input +-std=c++98 not supported +./calc.at:1346: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +-std=c++98 not supported +======== Testing with C++ standard flags: '' +-std=c++98 not supported +======== Testing with C++ standard flags: '' stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: -1.7: syntax error +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 stderr: stderr: -stdout: + | 1//2 +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1347: $PREPARSER ./calc input +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.1: syntax error input: - | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: cat stderr +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +1.3: syntax error +1.3: syntax error syntax error -./types.at:139: $PREPARSER ./test +./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' 
(1.18: ) -1.18: syntax error -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" 
(1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Next token is token '=' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.1: 7) +Shifting token "number" (1.1: 7) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 7) +-> $$ = nterm exp (1.1: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 7) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 7) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 7) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 
0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1338: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1340: cat stderr -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 - | (* *) + (*) + (*) -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -./calc.at:1344: $PREPARSER ./calc /dev/null -input: -stdout: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | (!!) + (1 2) = 1 -./calc.at:1340: $PREPARSER ./calc input -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 21 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: -3) +-> $$ = nterm exp (1.1: -6) Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 6 8 21 30 +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: -6) +-> $$ = nterm exp (1.1: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 
21 -Stack now 0 8 21 4 12 21 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.1: 5) +Shifting token "number" (1.1: 5) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 5) +-> $$ = nterm exp (1.1: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '\n' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 5) +-> $$ = nterm exp (1.1: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: -5) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: -5) +-> $$ = nterm exp (1.1: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: -5) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 10 +Stack now 0 6 
2 10 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (1.1: ) +Shifting token '^' (1.1: ) +Entering state 24 +Stack now 0 6 2 10 24 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.1: ) +Reducing stack by rule 12 (line 117): + $1 = nterm exp (1.1: 1) + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: -1) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: -1) +-> $$ = nterm exp (1.1: -1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: -1) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.32: ) 
-Shifting token '(' (1.32: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: -1) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: -1) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: -1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '^' (1.1: ) +Shifting token '^' (1.1: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) 
-Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -syntax error -syntax error -syntax error -stderr: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr -syntax error, unexpected end of input -./types.at:139: ./check -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: cat stderr -stderr: -./calc.at:1355: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -error: 2222 != 1 -======== Testing with C++ standard flags: '' -input: -stderr: -./calc.at:1334: cat stderr -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: cat stderr - | 1 + 2 * 3 + !- ++ -syntax error, unexpected end of input -./calc.at:1346: $PREPARSER ./calc /dev/null -======== Testing with C++ standard flags: '' -./calc.at:1338: $PREPARSER ./calc input -stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: - | (- *) + (1 2) = 1 -stderr: -syntax error -syntax error -syntax error -./calc.at:1342: $PREPARSER ./calc input -syntax error -error: 2222 != 1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1 2 -input: -./calc.at:1348: $PREPARSER ./calc input -1.1: syntax error -stderr: -stderr: -./calc.at:1347: cat stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -error: 2222 != 1 -stderr: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error, unexpected number -./calc.at:1357: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: $PREPARSER ./calc input -1.1: syntax error -./calc.at:1343: cat stderr -./calc.at:1337: cat stderr -stderr: -./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -syntax error -syntax error -error: 2222 != 1 -input: -stderr: - | - | +1 -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1344: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' 
(1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: 
popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1338: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1347: $PREPARSER ./calc input -1.3: syntax error, unexpected number -./calc.at:1341: cat stderr -input: -input: -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1341: $PREPARSER ./calc input -input: -stderr: -input: -stderr: - | (!!) 
+ (1 2) = 1 -./calc.at:1343: $PREPARSER ./calc input -2.1: syntax error - | 1 + 2 * 3 + !* ++ -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token 
-Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting 
token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1338: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: -./calc.at:1340: cat stderr -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1336: cat stderr -stderr: -stderr: -./calc.at:1346: cat stderr -./types.at:139: ./check -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1348: cat stderr -1.14: memory exhausted -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr --std=c++98 not supported -======== Testing with C++ standard flags: '' -stderr: -./calc.at:1344: $PREPARSER ./calc input -input: -syntax error, unexpected number -error: 2222 != 1 -stderr: -stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1346: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: -2.1: syntax error -1.14: memory exhausted -stderr: - | 1 + 2 * 3 + !+ ++ - | (- *) + (1 2) = 1 -./calc.at:1340: $PREPARSER ./calc input -input: -stderr: - | 1//2 -./calc.at:1348: $PREPARSER ./calc input -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error -syntax error -error: 2222 != 1 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -syntax error -error: 2222 != 1 -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: cat stderr -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1341: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1343: cat stderr -./calc.at:1342: $PREPARSER ./calc input -stderr: -stdout: -./types.at:139: $PREPARSER ./test -input: -input: -./calc.at:1347: cat stderr -./calc.at:1338: cat stderr - | 1 + 2 * 3 + !- ++ - | (!!) + (1 2) = 1 -stderr: -stderr: -syntax error -syntax error -syntax error -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1334: $PREPARSER ./calc input -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1357: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | (- *) + (1 2) = 1 -./calc.at:1347: $PREPARSER ./calc /dev/null -./calc.at:1343: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -stderr: -stderr: -======== Testing with C++ standard flags: '' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' 
(1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: ./check -./calc.at:1341: $PREPARSER ./calc input -input: -./calc.at:1337: cat stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 --std=c++98 not supported -======== Testing with C++ standard flags: '' -./calc.at:1338: $PREPARSER ./calc input -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -stderr: -stderr: -stderr: -stderr: -syntax error -error: 2222 != 1 -stderr: -syntax error -syntax error -syntax error -stdout: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1348: cat stderr -stderr: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' 
(1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -syntax error -error: 2222 != 1 -stdout: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -stdout: -1.1: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -input: -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1344: cat stderr -./calc.at:1346: cat stderr -stderr: -input: - | error -stderr: -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - | (- *) + (1 2) = 1 -./calc.at:1337: $PREPARSER ./calc input - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 --std=c++98 not supported -======== Testing with C++ standard flags: '' -stderr: -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1346: $PREPARSER ./calc input -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' 
(1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -======== Testing with C++ standard flags: '' -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1344: $PREPARSER ./calc input - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1340: cat stderr -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1336: cat stderr -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error, unexpected number -error: 2222 != 1 -1.1: syntax error, unexpected invalid token -input: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1347: cat stderr -stdout: - | (* *) + (*) + (*) -./calc.at:1340: $PREPARSER ./calc input -stderr: -./calc.at:1341: cat stderr -./calc.at:1343: cat stderr -stderr: -./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1334: cat stderr -./calc.at:1342: cat stderr -stderr: -stdout: -syntax error -syntax error -syntax error -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting 
token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./types.at:139: ./check -1.11: syntax error -1.1-16: error: 2222 != 1 -input: -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 --std=c++03 not supported -======== Testing with C++ standard flags: '' -input: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: - | (- *) + (1 2) = 1 - | 1 + 2 * 3 + !* ++ -./calc.at:1341: $PREPARSER ./calc input - | (- *) + (1 2) = 1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1347: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1350: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -stderr: -input: -stdout: -./calc.at:1338: cat stderr -stderr: -stderr: -stderr: -./calc.at:1353: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | (* *) + (*) + (*) -./calc.at:1343: $PREPARSER ./calc input -memory exhausted -syntax error -syntax error -error: 2222 != 1 -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 
25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | 1 + 2 * 3 + !+ ++ -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1348: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1353: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + #) = 1111 -stderr: -input: -stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1338: $PREPARSER ./calc input -memory exhausted -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) 
-Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | 1 = 2 = 3 -./calc.at:1348: $PREPARSER ./calc input -syntax error -syntax error -error: 2222 != 1 -input: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1353: $PREPARSER ./calc input -stderr: -./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-stderr: -stderr: -1.7: syntax error, unexpected '=' -1.6: syntax error: invalid character: '#' -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: cat stderr -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -stdout: -input: -./types.at:139: ./check -./calc.at:1344: cat stderr -input: -./types.at:139: ./check -input: -./calc.at:1346: cat stderr - | 1 2 - | 1 + 2 * 3 + !- ++ -stderr: -stderr: -stderr: -1.7: syntax error, unexpected '=' - | (* *) + (*) + (*) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 7) -Shifting token "number" (1.1: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 7) --> $$ = nterm exp (1.1: 7) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 7) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 7) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 7) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 6 8 21 1 
-Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: -3) --> $$ = nterm exp (1.1: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: -6) --> $$ = nterm exp (1.1: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (1.1: 5) -Shifting token "number" (1.1: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 5) --> $$ = nterm exp (1.1: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 5) --> $$ = nterm exp (1.1: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: -5) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: -5) --> $$ = nterm exp (1.1: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: -5) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (1.1: ) -Shifting token '^' (1.1: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token 
-Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 12 (line 117): - $1 = nterm exp (1.1: 1) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: -1) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: -1) --> $$ = nterm exp (1.1: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: -1) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 ./calc.at:1337: $PREPARSER ./calc input -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1350: $PREPARSER ./calc input -(line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: -1) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (1.1: ) -Shifting token '^' (1.1: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (1.1: ) -Reducing 
stack by rule 12 (line 117): - $1 = nterm exp (1.1: -1) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '=' (1.1: ) +Reducing stack by rule 12 (line 117): + $1 = nterm exp (1.1: -1) + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) @@ -25916,232 +21551,40 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -1.6: syntax error: invalid character: '#' ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1340: cat stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; --std=c++98 not supported -======== Testing with C++ standard flags: '' -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1341: "$PERL" -pi -e 'use strict; +input: +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: cat stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: --std=c++03 not supported -======== Testing with C++ standard flags: '' -stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1362: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS + | 1 2 +./calc.at:1350: $PREPARSER ./calc input stderr: stderr: -./types.at:139: ./check -1.3: syntax error, unexpected number stderr: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by 
rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: --std=c++98 not supported -======== Testing with C++ standard flags: '' -stdout: -./calc.at:1351: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -stderr: -./types.at:139: ./check +Next token is token "invalid token" (1.1: ) +1.1: syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -27160,12 +22603,11 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) input: -input: -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1341: cat stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" @@ -27173,27 +22615,75 @@ }eg ' expout || exit 77 input: -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1338: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | error + | 1 2 +./calc.at:1334: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +1.3: syntax error, unexpected number +1.3: syntax error +syntax error +1.3: syntax error +stderr: +stderr: +stderr: +stdout: +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check +./types.at:139: ./check +stderr: +stdout: +./types.at:139: ./check +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1340: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1344: $PREPARSER ./calc input +input: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1343: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -std=c++03 not supported +stderr: ======== Testing with C++ standard flags: '' -./calc.at:1348: "$PERL" -pi -e 'use strict; +stderr: + | error +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | error +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input +-std=c++03 not supported +======== Testing with C++ standard flags: '' +1.3: syntax error, unexpected number +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +stderr: +stdout: +./types.at:139: ./check +stderr: +stderr: +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27203,289 +22693,69 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1341: cat stderr -stderr: -stderr: input: -stderr: -stderr: -./calc.at:1336: cat stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 
21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1351: $PREPARSER ./calc input -1.3: syntax error, unexpected number -stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) -input: ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ' expout || exit 77 -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: cat stderr -./calc.at:1334: cat stderr -stdout: input: -./calc.at:1348: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stdout: | 1 2 -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./calc.at:1343: cat stderr -stderr: - | (* *) + (*) + (*) -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1353: $PREPARSER ./calc input -./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./types.at:139: ./check stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1342: cat stderr -input: +./calc.at:1341: $PREPARSER ./calc input +-std=c++98 not supported +======== Testing with C++ standard flags: '' stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1338: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; +syntax error +syntax error +1.3: syntax error, unexpected number +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | (#) + (#) = 2222 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1338: cat stderr +input: +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.1: 2) -Stack now 0 -stderr: -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: cat stderr + | error +stderr: +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./calc.at:1343: $PREPARSER ./calc input +stderr: +stderr: +syntax error Starting parse Entering state 0 Stack now 0 @@ -27506,49 +22776,32 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 +1.3: syntax error, unexpected number +./calc.at:1354: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h stderr: -input: -input: -input: - | (!!) + (1 2) = 1 -./calc.at:1347: $PREPARSER ./calc input -input: -input: - | 1 2 - | 1 + 2 * 3 + !+ ++ -./calc.at:1343: $PREPARSER ./calc input -syntax error -syntax error + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) syntax error - | - | +1 -./calc.at:1351: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1340: $PREPARSER ./calc input -stderr: -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1350: cat stderr -input: -input: -stderr: -input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: - | 1 + 2 * 3 + !* ++ - | (# + 1) = 1111 -./calc.at:1342: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 -stderr: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: $PREPARSER ./calc input - | (* *) + (*) + (*) +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 stderr: -1.3: syntax error, unexpected number -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27558,8 +22811,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -2.1: syntax error, unexpected '+' -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1348: cat stderr +stderr: +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27569,19 +22827,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: +stdout: stderr: -1.2: syntax error: invalid character: '#' -1.11: syntax error -1.1-16: error: 2222 != 1 +syntax error, unexpected invalid token +syntax error +syntax error +./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +input: stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = 
-1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1354: $PREPARSER ./calc input +stdout: stderr: stderr: +stdout: +./types.at:139: ./check +stdout: +./types.at:139: ./check +./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; +input: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: cat stderr +stdout: +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1 = 2 = 3 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27591,3691 +22877,156 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1338: $PREPARSER ./calc input +-std=c++03 not supported +======== Testing with C++ standard flags: '' + | error +./types.at:139: ./check +./calc.at:1344: $PREPARSER ./calc input +stderr: +stderr: +stderr: +./calc.at:1355: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1357: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '*' (1.1: ) +Next token is token "number" (1.1: 2) syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.1: 2) +Stack now 0 +syntax error +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is 
token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 
-Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -memory exhausted -stdout: - | 1//2 -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: ./check -stderr: -./calc.at:1337: cat stderr -./calc.at:1344: cat stderr -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error, unexpected number -2.1: syntax error, unexpected '+' --std=c++03 not supported -======== Testing with C++ standard flags: '' -./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1346: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> 
$$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -input: -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -memory exhausted -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next 
token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -input: -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !- ++ - | (* *) + (*) + (*) -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1336: cat stderr -stderr: -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -======== Testing with C++ standard flags: '' -./calc.at:1346: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 129): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1351: cat stderr -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: $PREPARSER ./calc input -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1341: cat stderr -./calc.at:1348: cat stderr -input: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1//2 -stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1347: cat stderr -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1340: cat stderr - | (1 + #) = 1111 -input: -./calc.at:1353: $PREPARSER ./calc input -input: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' - | 1//2 - | 1 + 2 * 3 + !- ++ -input: -./calc.at:1348: $PREPARSER ./calc /dev/null -./calc.at:1337: $PREPARSER ./calc input -input: -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1351: $PREPARSER ./calc input -stderr: - | (- *) + (1 2) = 1 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.1: ) -Stack now 0 -./calc.at:1334: cat stderr -stderr: - | 1 + 2 * 3 + !+ ++ -input: -stderr: -./calc.at:1341: $PREPARSER ./calc input -stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error, unexpected end of file -stderr: -./calc.at:1338: cat stderr -stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 130): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: cat stderr -./calc.at:1347: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -stdout: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1340: $PREPARSER ./calc input -stderr: -stderr: -stdout: -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -1.1: syntax error, unexpected end of file -stderr: -./types.at:139: ./check -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 130): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.1: ) -Stack now 0 -stderr: -./types.at:139: ./check -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' --std=c++03 not supported -input: -======== Testing with C++ standard flags: '' -./calc.at:1346: cat stderr -stderr: -memory exhausted -syntax error: invalid character: '#' -stderr: - | (#) + (#) = 2222 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -input: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1344: cat stderr -input: -input: -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1342: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | error - | 1 + 2 * 3 + !+ ++ -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1338: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1346: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1343: cat stderr -stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -memory exhausted -1.1: syntax error, unexpected invalid token -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr -input: -stderr: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -input: -stderr: - | 1 + 2 * 3 + !- ++ -stdout: -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1351: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1344: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -stderr: -stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1348: $PREPARSER ./calc input -stderr: -1.1: syntax error, unexpected invalid token - | 1 + 2 * 3 + !* ++ -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | error -stderr: -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1337: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' 
(1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -1.6: syntax error: invalid character: '#' -./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stderr: -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: cat stderr -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error, unexpected invalid token -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -./calc.at:1347: cat stderr -input: -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -memory exhausted -stderr: -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 + 2 * 3 + !* ++ -input: -./calc.at:1337: $PREPARSER ./calc input - | error - | 1 + 2 * 3 + !- ++ -1.1: syntax error, unexpected invalid token -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: $PREPARSER ./calc input -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -input: -memory exhausted -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr -stderr: -input: -stderr: - | 1 + 2 * 3 + !- ++ - | (* *) + (*) + (*) -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' 
(1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 131): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1347: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -stderr: -./calc.at:1350: cat stderr -input: -./calc.at:1338: cat stderr -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1340: cat stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -input: -syntax error: invalid character: '#' -stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 - | (1 + 1) / (1 - 1) -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' 
(1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 131): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1350: $PREPARSER ./calc input -input: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error - | (#) + (#) = 2222 -stderr: -./calc.at:1341: cat stderr -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1342: cat stderr -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error, unexpected '=' -stderr: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1343: cat stderr -./calc.at:1348: cat stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -1.11-17: error: null divisor -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1341: $PREPARSER ./calc input -1.7: syntax error, unexpected '=' - | (1 + #) = 1111 -./calc.at:1342: $PREPARSER ./calc input -stderr: -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -memory exhausted -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' - | (#) + (#) = 2222 -./calc.at:1343: $PREPARSER ./calc input -stderr: - | 1 = 2 = 3 -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1346: cat stderr -./calc.at:1344: cat stderr -./calc.at:1337: cat stderr - | (!!) 
+ (1 2) = 1 -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1353: cat stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1347: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./types.at:139: ./check -syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error, unexpected '=' -memory exhausted -stderr: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1334: cat stderr -stderr: -input: -./calc.at:1338: cat stderr -stderr: -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: - | 1 + 2 * 3 + !* ++ -input: -stderr: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1344: $PREPARSER ./calc input -1.7: syntax error, unexpected '=' -./calc.at:1350: cat stderr -stdout: -stderr: -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1346: $PREPARSER ./calc input -stderr: -./types.at:139: ./check -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 - | 1 = 2 = 3 -stderr: -./calc.at:1353: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -memory exhausted -./calc.at:1336: cat stderr -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -473. calc.at:1338: -std=c++98 not supported -======== Testing with C++ standard flags: '' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok - | (#) + (#) = 2222 -input: -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) -Stack now 0 8 19 -Error: popping token '=' (1.1: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.1: ) -Stack now 0 -memory exhausted -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 
7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.14: memory exhausted -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -stderr: -./calc.at:1351: cat stderr -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' 
(1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1350: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) -Stack now 0 8 19 -Error: popping token '=' (1.1: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.1: ) -Stack now 0 - | (1 + # + 1) = 1111 -./calc.at:1340: cat stderr -./calc.at:1336: $PREPARSER ./calc input -stderr: -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token 
"number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -2.1: syntax error, unexpected '+' -stderr: -input: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: - | - | +1 -syntax error: invalid character: '#' - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' 
(1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1348: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: - - | 1 + 2 * 3 + !- ++ -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -2.1: syntax error, unexpected '+' -./calc.at:1342: cat stderr -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -2.1: syntax error, unexpected '+' -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1344: cat stderr -stderr: -input: -stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: cat stderr -stdout: -syntax error: invalid character: '#' -2.1: syntax error, unexpected '+' -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1348: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -input: - | (# + 1) = 1111 -stderr: -./calc.at:1343: cat stderr - | (1 + #) = 1111 -./calc.at:1340: $PREPARSER ./calc input -input: -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1341: cat stderr -input: -stderr: -stderr: - | (#) + (#) = 2222 -stderr: -./calc.at:1346: $PREPARSER ./calc input - | (#) + (#) = 2222 -syntax error: invalid character: '#' -./calc.at:1344: $PREPARSER ./calc input -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -syntax error: invalid character: '#' - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1353: cat stderr -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stdout: -stderr: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -input: -syntax error: invalid character: '#' -./types.at:139: $PREPARSER ./test -stderr: -stderr: -syntax error: invalid character: '#' - | (#) + (#) = 2222 -./calc.at:1341: $PREPARSER ./calc input -stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1337: cat stderr -stderr: -stderr: -./calc.at:1350: cat stderr -syntax error: invalid character: '#' -input: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: cat stderr -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: $PREPARSER ./calc input -488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... 
-input: -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -stderr: -./calc.at:1358: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1351: cat stderr -./calc.at:1350: $PREPARSER ./calc /dev/null -stderr: -stderr: -syntax error: invalid character: '#' -======== Testing with C++ standard flags: '' -./calc.at:1348: cat stderr -./calc.at:1347: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (1.1: ) -Stack now 0 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1337: $PREPARSER ./calc input -1.1: syntax error, unexpected end of file -./calc.at:1351: $PREPARSER ./calc /dev/null -======== Testing with C++ standard flags: '' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - | (#) + (#) = 2222 - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.1: syntax error, unexpected end of file -stderr: -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr -1.1: syntax error, unexpected end of file -./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1348: $PREPARSER ./calc input -stderr: -stderr: -stdout: -./calc.at:1347: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is 
token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -1.1: syntax error, unexpected end of file -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (1.1: ) -Stack now 0 -stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -1.14: memory exhausted -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $PREPARSER ./test -input: -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - - | (1 + 1) / (1 - 1) -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1336: $PREPARSER ./calc input -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -input: -./calc.at:1342: cat stderr -./calc.at:1346: cat stderr -stderr: -./calc.at:1340: cat stderr - | (1 + #) = 1111 -./calc.at:1344: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -input: -./calc.at:1343: cat stderr -stderr: -error: null divisor -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: cat stderr -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1350: cat stderr - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1355: $PREPARSER ./calc input - | (1 + #) = 1111 -======== Testing with C++ standard flags: '' -input: -./calc.at:1351: cat stderr -stderr: - | (1 + # + 1) = 1111 -1.14: memory exhausted -./calc.at:1342: $PREPARSER ./calc input -stderr: -stderr: -input: -syntax error: invalid character: '#' -error: null divisor -input: -input: -./calc.at:1348: cat stderr -./calc.at:1353: cat stderr -./calc.at:1346: $PREPARSER ./calc input -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | (# + 1) = 1111 -stderr: -./calc.at:1340: $PREPARSER ./calc input -syntax error: invalid character: '#' - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -input: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1353: $PREPARSER ./calc /dev/null - | (1 + #) = 1111 -./calc.at:1341: $PREPARSER ./calc input -stdout: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1350: $PREPARSER ./calc input -input: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./types.at:139: ./check -./calc.at:1337: cat stderr -stderr: -stderr: - | (# + 1) = 1111 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stderr: -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 --std=c++03 not supported -======== Testing with C++ standard flags: '' -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error: invalid character: '#' -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -1.6: syntax error: invalid character: '#' -stderr: -input: -syntax error: invalid character: '#' -stderr: -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token @@ -32197,184 +23948,21 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | (# + 1) = 1111 -stderr: -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +./calc.at:1347: cat stderr input: -syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1350: cat stderr + | error +-std=c++03 not supported +======== Testing with C++ standard flags: '' stderr: -./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 - | 1 + 2 * 3 + !+ ++ ./calc.at:1348: $PREPARSER ./calc input stderr: -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1357: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - +1.7: syntax error Starting parse Entering state 0 Stack now 0 @@ -32694,7 +24282,8 @@ Shifting token '\n' (4.10-5.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by syntax error, unexpected invalid token +rule 4 (line 97): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) @@ -33392,132 +24981,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1344: cat stderr -stdout: -stderr: -./calc.at:1350: cat stderr -./calc.at:1347: cat stderr -./calc.at:1336: cat stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: ./check -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' +syntax error, unexpected invalid token +======== Testing with C++ standard flags: '' +input: input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -33532,47 +24998,29 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -input: -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1355: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1357: $PREPARSER ./calc input -input: - | (# + 1) = 1111 --std=c++98 not supported -======== Testing with C++ standard flags: '' -./calc.at:1344: $PREPARSER ./calc input -input: stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: +stdout: +./types.at:139: ./check stderr: - | (1 + #) = 1111 - | 1 2 -./calc.at:1355: $PREPARSER ./calc input -syntax error: invalid character: '#' +stdout: +input: ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -471. calc.at:1336: ok -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -33582,155 +25030,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: cat stderr -stderr: -./calc.at:1351: cat stderr -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1350: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1340: cat stderr -./calc.at:1342: cat stderr -stderr: -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1351: cat stderr +./types.at:139: ./check stderr: - | (#) + (#) = 2222 -./calc.at:1341: cat stderr -input: -syntax error: invalid character: '#' +-std=c++03 not supported +======== Testing with C++ standard flags: '' + | 1 = 2 = 3 +./calc.at:1337: $PREPARSER ./calc input +1.1: syntax error, unexpected invalid token stderr: Starting parse Entering state 0 @@ -34750,49 +26062,7 @@ Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 - | 1 + 2 * 3 + !- ++ -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1353: cat stderr Starting parse Entering state 0 Stack now 0 @@ -34838,7 +26108,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -34846,7 +26116,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -34938,14 +26208,14 @@ Stack now 0 6 8 21 30 22 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Stack now 0 6 8 21 30 22 31 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -34953,7 +26223,7 @@ Entering state 30 Stack now 0 6 8 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm 
exp (2.5-10: -6) @@ -34981,7 +26251,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -35059,7 +26329,7 @@ Stack now 0 6 2 10 24 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -35067,7 +26337,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -35094,7 +26364,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -35112,14 +26382,7 @@ Shifting token '\n' (4.10-5.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by input: -input: - | (1 + # + 1) = 1111 -stderr: -input: -./calc.at:1343: cat stderr -./calc.at:1340: $PREPARSER ./calc input -rule 4 (line 97): +Reducing stack by rule 4 (line 97): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) @@ -35153,7 +26416,7 @@ Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -35163,7 +26426,7 @@ Shifting token ')' (5.4: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -35187,7 +26450,7 @@ Stack now 0 6 8 24 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -35276,21 +26539,21 @@ Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -35317,7 +26580,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -35390,7 +26653,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -35413,7 +26676,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (9.1-5: -1) $2 = 
token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -35441,8 +26704,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducinstderr: -g stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -35519,7 +26781,7 @@ Stack now 0 6 8 20 4 12 20 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -35530,7 +26792,7 @@ Shifting token ')' (10.11: ) Entering state 27 Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -35539,7 +26801,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -35643,7 +26905,7 @@ Stack now 0 6 8 24 33 24 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -35651,7 +26913,7 @@ Entering state 33 Stack now 0 6 8 24 33 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -35729,7 +26991,7 @@ Stack now 0 6 4 12 24 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -35740,7 +27002,7 @@ Shifting token ')' (13.5: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -35764,7 +27026,7 @@ Stack now 0 6 8 24 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -35818,167 +27080,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | (# + 1) = 1111 -./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: - | (1 + 1) / (1 - 1) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) 
-Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -stdout: -stderr: - -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1342: $PREPARSER ./calc input -input: -syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1337: cat stderr - | (# + 1) = 1111 -./types.at:139: ./check -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor --std=c++98 not supported - | (!!) + (1 2) = 1 -======== Testing with C++ standard flags: '' -./calc.at:1351: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: - | (1 + # + 1) = 1111 -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1358: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stdout: -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -35988,21 +27093,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -syntax error: invalid character: '#' -input: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -input: -./calc.at:1350: cat stderr -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -36012,9 +27103,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + # + 1) = 1111 - | 1 2 -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -36024,1478 +27113,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1357: $PREPARSER ./calc input -stderr: --std=c++98 not supported -======== Testing with C++ standard flags: '' -syntax error: invalid character: '#' -stderr: -./calc.at:1344: cat stderr -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor -stderr: -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1334: cat stderr -stderr: -stderr: input: -./calc.at:1355: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -stdout: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) 
-Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 
21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' 
(1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1//2 stderr: - | (- *) + (1 2) = 1 + | error + | error +./calc.at:1347: $PREPARSER ./calc input ./calc.at:1350: $PREPARSER ./calc input -./types.at:139: ./check -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -syntax error: invalid character: '#' -stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr -input: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 --std=c++98 not supported +-std=c++03 not supported +./calc.at:1346: $PREPARSER ./calc input ======== Testing with C++ standard flags: '' - | 1//2 -./calc.at:1347: cat stderr -input: -stderr: -stderr: -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 stderr: -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1344: $PREPARSER ./calc input -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 - | (# + 1) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' 
(1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' 
(1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -syntax error: invalid character: '#' -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: -stderr: -./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | 1 + 2 * 3 + !* ++ -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -489. calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1360: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1340: cat stderr -./calc.at:1346: cat stderr -./calc.at:1348: $PREPARSER ./calc input - | (1 + #) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1347: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -syntax error: invalid character: '#' -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stdout: -./types.at:139: ./check -./calc.at:1354: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: cat stderr -./calc.at:1351: cat stderr -1.14: memory exhausted -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: -./calc.at:1341: cat stderr -input: -./calc.at:1343: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1346: $PREPARSER ./calc input -stderr: -input: -input: -1.6: syntax error: invalid character: '#' -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | 1//2 - | (1 + 1) / (1 - 1) - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1342: cat stderr -./calc.at:1354: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '=' (1.7: ) +1.7: syntax error +Error: popping nterm exp (1.5: 2) Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = 
token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1351: $PREPARSER ./calc input -stderr: -stderr: -error: null divisor -stderr: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -input: -1.6: syntax error: invalid character: '#' -1.14: memory exhausted -./calc.at:1337: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1355: cat stderr -./calc.at:1341: $PREPARSER ./calc input -stderr: -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: stderr: -error: null divisor Starting parse Entering state 0 Stack now 0 @@ -37815,26 +27486,7 @@ Shifting token '\n' (4.10-5.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by 1.6: syntax error: invalid character: '#' -476. 
calc.at:1342: ok -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -rule 4 (line 97): +Reducing stack by rule 4 (line 97): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) @@ -38532,245 +28184,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 - | error -./calc.at:1355: $PREPARSER ./calc input -stderr: -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1343: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1353: cat stderr -./calc.at:1337: $PREPARSER ./calc input -stderr: -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1344: cat stderr -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: -stderr: -stderr: -stderr: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1350: cat stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 
-Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -38816,7 +28230,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -38824,7 +28238,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -38916,14 +28330,14 @@ Stack now 0 6 8 21 30 22 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Stack now 0 6 8 21 30 22 31 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -38931,7 +28345,7 @@ Entering state 30 Stack now 0 6 8 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -38959,7 +28373,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -39037,7 +28451,7 @@ Stack now 0 6 2 10 24 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -39045,7 +28459,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -39072,7 +28486,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -39124,7 +28538,7 @@ Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -39134,7 +28548,7 @@ Shifting token ')' (5.4: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): +Reducing stack by rule 13 (line 126): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -39158,7 +28572,7 @@ Stack now 0 6 8 24 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -39247,21 +28661,21 @@ Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' 
(7.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -39288,7 +28702,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -39361,7 +28775,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -39384,7 +28798,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -39412,7 +28826,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -39489,7 +28903,7 @@ Stack now 0 6 8 20 4 12 20 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -39500,7 +28914,7 @@ Shifting token ')' (10.11: ) Entering state 27 Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 138): +Reducing stack by rule 13 (line 126): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -39509,7 +28923,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -39613,7 +29027,7 @@ Stack now 0 6 8 24 33 24 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -39621,7 +29035,7 @@ Entering state 33 Stack now 0 6 8 24 33 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -39699,7 +29113,7 @@ Stack now 0 6 4 12 24 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -39710,7 +29124,7 @@ Shifting token ')' (13.5: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): +Reducing stack by rule 13 (line 126): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -39734,7 +29148,7 @@ Stack now 0 6 8 24 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): 
$1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -39788,178 +29202,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -error: null divisor -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1357: cat stderr -stdout: -stderr: -./calc.at:1348: cat stderr -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = 
nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 +1.7: syntax error +syntax error, unexpected invalid token input: +./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -39970,23 +29217,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1334: cat stderr -stderr: input: -error: null divisor - | (1 + 1) / (1 - 1) input: -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1347: cat stderr - | (* *) + (*) + (*) -./calc.at:1350: $PREPARSER ./calc input +./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stderr: | 1 2 -./calc.at:1346: cat stderr - | (!!) + (1 2) = 1 -input: + | 1 = 2 = 3 + | 1//2 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1340: $PREPARSER ./calc input stderr: - | (#) + (#) = 2222 -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1351: $PREPARSER ./calc input +1.1: syntax error +1.1: syntax error +stderr: +stderr: +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+stdout: +1.1: syntax error, unexpected invalid token +stdout: +./types.at:139: ./check +./types.at:139: ./check +stderr: +stdout: +./types.at:139: ./check +stderr: +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -39996,192 +29258,83 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1334: cat stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: stderr: -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1353: $PREPARSER ./calc input -input: -error: null divisor stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: stderr: -input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +syntax error +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./types.at:139: $PREPARSER ./test +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' 
(1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): +Stack now 0 1 +Reducing stack by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a 
token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | error -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1357: $PREPARSER ./calc input -stderr: -stderr: -./types.at:139: ./check -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1340: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./calc.at:1342: cat stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40191,163 +29344,75 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1353: cat stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: cat stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1360: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS + | 1 2 + | 1 2 +./calc.at:1357: $PREPARSER ./calc input stderr: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr --std=c++03 not supported +./calc.at:1355: $PREPARSER ./calc input stderr: -./calc.at:1351: cat stderr - | (# + 1) = 1111 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1347: $PREPARSER ./calc input stderr: -stdout: -./calc.at:1355: cat stderr stderr: -435. types.at:139: ======== Testing with C++ standard flags: '' - ok +1.1: syntax error +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +1.1: syntax error +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: cat stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1343: cat stderr + | 1 = 2 = 3 stderr: +./calc.at:1334: $PREPARSER ./calc input stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 
71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -1.11-17: error: null divisor -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./types.at:139: ./check -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -40369,149 +29434,32 @@ Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 stderr: -stderr: +syntax error +stdout: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering 
state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40521,94 +29469,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1341: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -stderr: -1.2: syntax error: invalid character: '#' +input: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -error: null divisor -stderr: -474. 
calc.at:1340: ok -1.11-17: error: null divisor + | 1 = 2 = 3 + | 1//2 +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1353: $PREPARSER ./calc input stderr: --std=c++03 not supported -======== Testing with C++ standard flags: '' -input: - | (* *) + (*) + (*) + | 1 = 2 = 3 +./calc.at:1342: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token @@ -40617,64 +29501,36 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Stack now 0 8 19 +Error: popping token '=' (1.1: ) Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stdout: -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1343: cat stderr +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.1: ) +Stack now 0 +./scanner.at:326: $PREPARSER ./input stderr: -./calc.at:1351: $PREPARSER ./calc input +436. types.at:139: ok +stdout: ./types.at:139: ./check -./calc.at:1348: "$PERL" -pi -e 'use strict; +stderr: +stdout: + +./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40684,20 +29540,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1341: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1 = 2 = 3 -./calc.at:1355: $PREPARSER ./calc input -stdout: --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: ./check -stderr: - -stderr: ./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -40708,33 +29550,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +input: ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -477. calc.at:1343: stderr: - ok -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +./calc.at:1344: cat stderr +./calc.at:1348: cat stderr +stderr: +stderr: +stderr: + | 1 = 2 = 3 + | 1 = 2 = 3 +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40743,72 +29580,56 @@ ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1343: $PREPARSER ./calc input ' expout || exit 77 -./calc.at:1350: cat stderr -std=c++03 not supported +======== Testing with C++ standard flags: '' +stderr: +./calc.at:1341: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 -error: null divisor +syntax error +syntax error +./calc.at:1358: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -490. calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: +stdout: stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -40823,48 +29644,39 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./types.at:139: $PREPARSER ./test +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1337: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1350: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1348: cat stderr -./calc.at:1357: cat stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1362: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: ./check +491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1363: if "$POSIXLY_CORRECT_IS_EXPORTED"; then sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y else @@ -40872,8 +29684,9 @@ fi -stderr: -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40883,9 +29696,78 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -error: null divisor -./calc.at:1354: "$PERL" -pi -e 'use strict; +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +syntax error, unexpected '=' +stderr: +syntax error +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Stack now 0 8 19 +Error: popping token '=' (1.1: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.1: ) +Stack now 0 +stdout: +input: +./types.at:139: ./check +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1358: $PREPARSER ./calc input +stderr: +stdout: +./types.at:139: ./check +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40895,13 +29777,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: cat stderr -./calc.at:1346: cat stderr -stderr: -./calc.at:1354: cat stderr +./calc.at:1337: cat stderr input: -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1350: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1354: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40911,7 +29796,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1355: "$PERL" -pi -e 'use strict; 
+./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40921,106 +29806,57 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1353: cat stderr -./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | 1 = 2 = 3 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1334: cat stderr -478. calc.at:1344: ok -./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -472. calc.at:1337: ok - | (1 + #) = 1111 -./calc.at:1351: cat stderr -input: -479. calc.at:1346: ok -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1347: cat stderr + | + | +1 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1338: $PREPARSER ./calc input +-std=c++98 not supported +======== Testing with C++ standard flags: '' stderr: -1.6: syntax error: invalid character: '#' -input: - | 1//2 -input: -491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1363: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - +======== Testing with C++ standard flags: '' stderr: -./calc.at:1355: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1350: $PREPARSER ./calc input -input: -input: -stderr: + | 1 = 2 = 3 stderr: +./calc.at:1348: $PREPARSER ./calc input +syntax error stderr: +syntax error stderr: - | (1 + 1) / (1 - 1) - | 1 + 2 * 3 + !+ ++ -./calc.at:1351: $PREPARSER ./calc input +463. scanner.at:326: ok +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check stderr: - Starting parse Entering state 0 Stack now 0 @@ -41034,535 +29870,1117 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -1.6: syntax error: invalid character: '#' - -./calc.at:1347: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Return for a new token: Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Return for a new token: +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp 
(1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Stack now 0 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Return for a new token: Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Return for a new token: Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -492. calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1364: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -input: - | - | +1 -./calc.at:1355: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Return for a new token: Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Return for a new token: Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Return for a new token: +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = 
nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Return for a new token: Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Return for a new token: Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) 
+Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Return for a new token: +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Return for a new token: +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 -Stack now 0 4 +Stack now 0 6 4 +Return for a new token: Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 -Stack now 0 4 2 +Stack now 0 6 4 2 +Return for a new token: Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Return for a new token: Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Return for a new token: Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) 
-Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Return for a new token: Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Return for a new token: 
+Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Return for a new token: +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Return for a new token: +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Return for a new token: +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Return for a new token: +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Return for a new token: +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Return for a new token: +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Return for a new token: +Reading a token +Next token is token number 
(9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Return for a new token: +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Return for a new token: +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Return for a new token: +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Return for a new token: +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Return for a new token: +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Return for a new token: +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Return for a new token: +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Return for a new token: +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Return for a new token: +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' 
(10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Return for a new token: +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Return for a new token: +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Return for a new token: +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Return for a new token: +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Return for a new token: +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Return for a new token: +Reading 
a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Return for a new token: +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Return for a new token: +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Return for a new token: +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Return for a new token: +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Return for a new token: +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Return for a new token: +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Return for a new token: +Reading a token +Next token is token '^' 
(13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Return for a new token: +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Return for a new token: +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1346: cat stderr +./calc.at:1347: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1351: cat stderr +stderr: +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +syntax error, unexpected '=' +1.7: syntax error, unexpected '=' +stderr: +2.1: syntax error +syntax error, unexpected '=' +syntax error +Starting parse +Entering state 0 +Stack now 0 Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -1.6: syntax error: invalid character: '#' -stderr: -stderr: -./calc.at:1341: cat stderr -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token Next token is token number (1.1: 1) Shifting token number (1.1: 1) Entering state 1 @@ -41572,30 +30990,90 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Return for a new token: +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Return for a new token: +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -41604,197 +31082,1046 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token -Next token 
is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -475. calc.at:1341: ok -stderr: -493. calc.at:1367: testing Calculator parse.error=custom ... -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1367: mv calc.y.tmp calc.y - -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Return for a new token: Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Return for a new token: Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Return for a new token: +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 
6 8 21 30 22 2 10 +Return for a new token: +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 30 -Stack now 0 4 12 21 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Return for a new token: +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Return for a new token: +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Return for a new token: +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Return for a new token: +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Return for a new token: +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing 
stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Return for a new token: +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Return for a new token: +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Return for a new token: +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Return for a new token: +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Return for a new token: +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Return for a new token: +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Return for a new token: Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '^' (5.5: ) +Shifting 
token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Return for a new token: Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Return for a new token: +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Return for a new token: +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Return for a new token: +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Return for a new token: +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Return for a new token: +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next 
token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Return for a new token: +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Return for a new token: +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Return for a new token: +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Return for a new token: +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Return for a new token: +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Return for a new token: +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Return for a new token: +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' 
(9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Return for a new token: +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Return for a new token: +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Return for a new token: +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Return for a new token: +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 6 8 20 4 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 6 8 20 4 12 +Return for a new token: Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 20 -Stack now 0 8 23 4 12 20 +Stack now 0 6 8 20 4 12 20 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 29 -Stack now 0 8 23 4 12 20 29 +Stack now 0 6 8 20 4 12 20 29 +Return for a new token: Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) +Next token is token ')' (10.11: ) +Reducing stack by 
rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Return for a new token: Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Return for a new token: +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) 
+Entering state 24 +Stack now 0 6 8 24 +Return for a new token: +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Return for a new token: +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Return for a new token: +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Return for a new token: +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Return for a new token: +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Return for a new token: +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Return for a new token: +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Return for a new token: +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 
6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Return for a new token: +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Return for a new token: +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Return for a new token: +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Return for a new token: +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token end of file (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1354: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: ./check +492. calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1364: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +input: +input: +stdout: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1355: cat stderr + | + | +1 +./types.at:139: $PREPARSER ./test +./calc.at:1357: cat stderr + | 1//2 +./calc.at:1337: $PREPARSER ./calc input + | error +./calc.at:1350: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1354: $PREPARSER ./calc input +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1340: cat stderr +input: +input: +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -41804,7 +32131,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: + | 1 = 2 = 3 +stderr: + | error +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1347: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input + | 1 = 2 = 3 +stderr: +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1346: $PREPARSER ./calc input +stderr: +======== Testing with C++ standard flags: '' +stderr: Starting parse Entering state 0 Stack now 0 @@ -41813,26 +32156,59 @@ Shifting token '\n' (1.1-2.0: ) Entering state 3 Stack now 0 3 -Reducing stack by rule 3 (line 96): +Reducing stack by rule 3 (line 83): $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' +2.1: syntax error Error: popping nterm input (1.1-2.0: ) Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1348: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1347: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +2.1: syntax error +syntax error, unexpected '=' +1.7: syntax error, unexpected '=' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -41842,22 +32218,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: cat stderr -./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1351: $PREPARSER ./calc input -stderr: -stderr: -stdout: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr -input: -input: - | (# + 1) = 1111 -./calc.at:1348: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test - -input: -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -41867,9 +32228,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !* ++ -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -41879,41 +32239,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./calc.at:1357: cat stderr - | error -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1347: cat stderr -stderr: -1.2: syntax error: invalid character: '#' + | 1//2 + | 1//2 stderr: +./calc.at:1357: $PREPARSER ./calc input stderr: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -496. calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full ... -494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc} ... -./calc.at:1368: mv calc.y.tmp calc.y +./calc.at:1355: $PREPARSER ./calc input + | 1 2 +./calc.at:1358: $PREPARSER ./calc input +1.7: syntax error +1.1: syntax error, unexpected invalid token +1.7: syntax error +415. types.at:139: ok -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1369: mv calc.y.tmp calc.y - -stderr: -stderr: -1.14: memory exhausted -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: mv calc.y.tmp calc.y - -./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -41923,95 +32280,119 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1355: cat stderr -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1353: cat stderr -./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stdout: +./calc.at:1364: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS | | +1 stderr: -./calc.at:1357: $PREPARSER ./calc input -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1340: $PREPARSER ./calc input stderr: -stdout: -./calc.at:1351: cat stderr stderr: -./types.at:139: ./check -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -1.2: syntax error: invalid character: '#' - | (1 + 1) / (1 - 1) -./types.at:139: ./check -./calc.at:1355: $PREPARSER ./calc /dev/null -input: -1.14: memory exhausted - | 1 + 2 * 3 + !* ++ -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./calc.at:1351: $PREPARSER ./calc input stderr: -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1334: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y Starting parse Entering state 0 Stack now 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +1.1: syntax error, unexpected invalid token Starting parse Entering state 0 Stack now 0 @@ -42020,54 +32401,164 @@ Shifting token '\n' (1.1-2.0: ) Entering state 3 Stack now 0 3 -Reducing stack by rule 3 (line 96): +Reducing stack by rule 3 (line 83): $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' +2.1: syntax error Error: popping nterm input (1.1-2.0: ) Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -input: +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: cat stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.14: memory exhausted -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11-17: error: null divisor +stderr: +syntax error +stderr: +1.7: syntax error +1.1: syntax error, unexpected invalid token +1.7: syntax error +493. calc.at:1367: testing Calculator parse.error=custom ... +./calc.at:1367: mv calc.y.tmp calc.y + +stderr: stdout: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1350: "$PERL" -pi -e 'use strict; +./types.at:139: ./check +./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1336: cat stderr ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: ./check +./calc.at:1342: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1343: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -470. calc.at:1334: ./calc.at:1362: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - ok +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./types.at:139: $PREPARSER ./test stderr: +input: ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -42078,12 +32569,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -1.14: memory exhausted -./calc.at:1367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1348: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -42093,8 +32580,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -42104,8 +32590,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1341: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1348: cat stderr +./types.at:139: ./check + | + | +1 +./calc.at:1334: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -42115,7 +32609,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +syntax error +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1338: cat stderr +./calc.at:1344: cat stderr ./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -42126,143 +32632,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1354: cat stderr -./calc.at:1348: cat stderr +stderr: +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading 
a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 @@ -42272,16 +32655,40 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: cat stderr -./calc.at:1347: "$PERL" -pi -e 'use strict; +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (1.1: ) +Stack now 0 +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stdout: +./types.at:139: ./check +stderr: +stdout: +./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +stdout: +./types.at:139: ./check +input: +input: +input: +input: +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1354: cat stderr +./calc.at:1350: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -42291,151 +32698,97 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +./calc.at:1367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | + | +1 + | + | +1 + | + | +1 +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1341: $PREPARSER ./calc input +-std=c++98 not supported + | + | +1 +======== Testing with C++ standard flags: '' + | + | +1 + | error +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1353: $PREPARSER ./calc input +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1338: $PREPARSER ./calc /dev/null +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1360: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h -497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full ... 
+stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc input: +./calc.at:1337: cat stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1358: cat stderr ./calc.at:1351: cat stderr -./calc.at:1355: cat stderr + | + | +1 stderr: stderr: -./calc.at:1371: mv calc.y.tmp calc.y - -input: +stderr: +stderr: +./calc.at:1344: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +1.1: syntax error +2.1: syntax error, unexpected '+' +syntax error +syntax error, unexpected '+' +syntax error +syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) +Next token is token "invalid token" (1.1: ) syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping 
token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 @@ -42445,45 +32798,76 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (1.1: ) +Stack now 0 +./scanner.at:326: $PREPARSER ./input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1360: $PREPARSER ./calc input +stderr: stdout: -./calc.at:1363: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | (#) + (#) = 2222 - | 1 = 2 = 3 -./calc.at:1357: $PREPARSER ./calc /dev/null +./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +./calc.at:1357: cat stderr +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | 1 = 2 = 3 stderr: -./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1364: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1354: $PREPARSER ./calc input -./types.at:139: ./check ./calc.at:1350: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1351: $PREPARSER ./calc input + | error +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1362: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +syntax error, unexpected '+' stderr: +./calc.at:1337: $PREPARSER ./calc /dev/null +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1347: cat stderr Starting parse Entering state 0 Stack now 0 @@ -42498,2095 +32882,2240 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1355: $PREPARSER ./calc input -stderr: -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 6 8 21 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token number (1.15: 1) -Shifting 
token number (1.15: 1) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 8 21 30 22 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack 
now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.6: syntax error: invalid character: '#' -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -stderr: -./calc.at:1368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -480. calc.at:1347: ok -Starting parse -Entering state 0 -Stack now 0 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 -Stack now 0 1 +Stack now 0 6 2 2 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: 
-1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7: 1) - $2 = 
token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 20 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 8 20 4 1 Reducing stack by rule 5 (line 101): - $1 = 
token number (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 6 8 20 4 12 20 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = 
nterm exp (1.46: 1) + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of inp1.6: syntax error: invalid character: '#' -ut. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1357: cat stderr - -./calc.at:1353: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1357: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 8 24 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 8 24 33 24 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering 
state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 6 4 12 24 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token end of file (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1353: $PREPARSER ./calc input +input: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: + | + | +1 +./calc.at:1347: $PREPARSER ./calc input +stderr: +stderr: +stderr: + | 1//2 +stderr: +stderr: + | 1 = 2 = 3 +./calc.at:1358: $PREPARSER ./calc input +stderr: +stderr: +stderr: +./calc.at:1351: $PREPARSER ./calc input +stderr: stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +1.7: syntax error, unexpected '=' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +stderr: +syntax error, unexpected '+' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +syntax error +2.1: syntax error, unexpected '+' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' 
(1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union ... -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 22 -Stack now 0 8 21 30 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: 
-3) +-> $$ = nterm exp (2.5-10: -6) Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1374: mv calc.y.tmp calc.y - -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1348: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 2 10 24 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + 
$3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by 1.1: syntax error +rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 6 4 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 8 24 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 6 8 19 1 Reducing 
stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack 
by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed ... -./calc.at:1375: mv calc.y.tmp calc.y - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1357: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: cat stderr -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): 
- $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -stderr: -./calc.at:1355: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token number (10.6: 2) 
+Shifting token number (10.6: 2) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1351: cat stderr - | (!!) 
+ (1 2) = 1 -stderr: -./calc.at:1357: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 4 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 
0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token 
'\n' (13.13-14.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token end of file (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +syntax error +464. scanner.at:326: ok +syntax error +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1362: $PREPARSER ./calc input +input: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +stderr: +./calc.at:1346: $PREPARSER ./calc input +stderr: +2.1: syntax error + | error +./calc.at:1357: $PREPARSER ./calc input + | error +stderr: +./calc.at:1355: $PREPARSER ./calc input +1.7: syntax error, unexpected '=' +syntax error, unexpected '+' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 + +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number 
(1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -44596,806 +35125,930 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 21 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Stack now 0 
6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Stack now 0 8 21 4 12 
+Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input 
(1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - | (1 + #) = 1111 -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1355: cat stderr -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -481. calc.at:1348: ok -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1354: cat stderr -./calc.at:1357: cat stderr -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1353: cat stderr -input: -stderr: -stderr: - | - | +1 -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) Entering state 2 -Stack now 0 4 2 +Stack now 0 6 2 2 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 
18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (9.15-10.0: ) +Reducing stack by 
rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1354: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' - | (- *) + (1 2) = 1 -./calc.at:1357: $PREPARSER ./calc input -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 8 20 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = 
token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -stderr: - | 1 + 2 * 3 + !* ++ -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 
(line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (13.7: 
3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token end of file (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1351: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: cat stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -45405,258 +36058,137 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +2.1: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Now at end of input. 
+1.1: syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +1.7: syntax error, unexpected '=' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 19 1 Reducing stack by 
rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -45666,696 +36198,938 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1351: cat stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1355: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is 
token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' 
(1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -input: -input: - | (* *) + (*) + (*) - | (# + 1) = 1111 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: $PREPARSER ./calc input -stderr: -stderr: -500. calc.at:1387: testing Calculator %glr-parser ... -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1354: cat stderr -stdout: -./calc.at:1387: mv calc.y.tmp calc.y - -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 -Stack now 0 4 +Stack now 0 6 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token number 
(5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = 
nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: cat stderr -./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1354: $PREPARSER ./calc /dev/null -./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack 
now 0 8 21 4 +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 20 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) 
+ $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1367: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 - | (* *) + (*) + (*) -./calc.at:1357: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1353: cat stderr -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token end of file (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $PREPARSER ./test -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1355: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc} ... 
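The trace that ends above is the second build's replay of the calculator expected-output check for the multi-line input echoed elsewhere in this hunk (1 - 2 - 3 = -4, 1 - (2 - 3) = 2, -1^2 = -1, (-1)^2 = 1, 2^2^3 = 256, (2^2)^3 = 64): '-' is left-associative, '^' is right-associative, and '^' binds tighter than unary '-'. As an illustrative sketch only, not part of the build log or of bison's test suite (script name and comments are hypothetical), Perl's '**' operator happens to follow the same conventions, so a few prints reproduce the values the trace reduces to:

  #!/usr/bin/perl
  # assoc-sketch.pl (hypothetical, illustrative only): Perl's '**' is
  # right-associative and binds tighter than unary minus, matching what the
  # calc trace above checks for '^'.
  use strict;
  use warnings;

  printf "1 - 2 - 3   = %d\n", 1 - 2 - 3;      # -4   left-associative '-'
  printf "1 - (2 - 3) = %d\n", 1 - (2 - 3);    #  2
  printf "2^2^3       = %d\n", 2 ** 2 ** 3;    # 256  right-associative '^'
  printf "(2^2)^3     = %d\n", (2 ** 2) ** 3;  #  64
  printf "-1^2        = %d\n", - 1 ** 2;       # -1   '^' binds tighter than unary '-'
  printf "(-1)^2      = %d\n", (-1) ** 2;      #  1

These are exactly the exp values that the rule 8 ('-'), rule 11 (unary '-') and rule 12 ('^') reductions record in the trace above.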
+./calc.at:1368: mv calc.y.tmp calc.y + +./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -46365,290 +37139,291 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + | 1 2 +stderr: +stdout: +stdout: +./calc.at:1340: $PREPARSER ./calc /dev/null +./calc.at:1360: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test +stderr: +2.1: syntax error +./types.at:139: ./check +1.7: syntax error, unexpected '=' +stderr: stderr: - | (#) + (#) = 2222 -./calc.at:1353: $PREPARSER ./calc input stderr: stderr: -./calc.at:1374: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: cat stderr -./calc.at:1375: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +2.1: syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: +stdout: +./types.at:139: ./check +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1348: cat stderr +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: + | 1 2 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: $PREPARSER ./calc input +stderr: +syntax error +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1348: $PREPARSER ./calc /dev/null +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1354: "$PERL" -pi -e 'use strict; +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -46658,10 +37433,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: ./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -46672,137 +37443,58 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -436. 
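The "$PERL" -pi -e hooks above rewrite each test's expected output (expout) in place before the comparison: a parse.error=custom diagnostic of the form "syntax error on token [X] (expected: [A] [B] ...)" becomes the conventional "syntax error, unexpected X, expecting A or B" wording, and the "expecting" list is kept only when it names two to four tokens; with one, or five or more, only the "unexpected" part survives. A standalone sketch of that substitution, illustrative only, using a shortened sample message (the real ones in this log can list as many as seven expected tokens):

  #!/usr/bin/perl
  # normalize-sketch.pl (hypothetical): the same s{...}{...}eg substitution the
  # expout hooks above apply, run here on one hard-coded sample message.
  use strict;
  use warnings;

  my $msg = "syntax error on token [number] (expected: ['='] ['-'] ['+'])";
  $msg =~ s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
           {
             my $unexp = $1;
             my @exps  = $2 =~ /\[(.*?)\]/g;
             ($#exps && $#exps < 4)
               ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
               : "syntax error, unexpected $unexp";
           }eg;
  print "$msg\n";   # prints: syntax error, unexpected number, expecting '=' or '-' or '+'

With the seven-token message that appears further down in this log, the same code takes the second branch and keeps only "syntax error, unexpected number".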
types.at:139: ok -./calc.at:1351: cat stderr +./calc.at:1343: cat stderr +./calc.at:1344: cat stderr +stderr: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: $PREPARSER ./calc /dev/null +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +1.1: syntax error, unexpected end of file +stderr: +stderr: +syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1343: $PREPARSER ./calc /dev/null +./calc.at:1344: $PREPARSER ./calc /dev/null +stderr: +stdout: +./types.at:139: ./check +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: cat stderr +./calc.at:1342: cat stderr +./calc.at:1341: cat stderr input: +./calc.at:1336: cat stderr +./calc.at:1355: cat stderr +./calc.at:1351: cat stderr ./calc.at:1354: cat stderr -input: - | 1 + 2 * 3 + !+ ++ - | 1 2 ./calc.at:1357: cat stderr -input: -./calc.at:1350: "$PERL" -pi -e 'use strict; +./calc.at:1350: cat stderr +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -46812,16 +37504,153 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1367: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1355: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1358: cat stderr +stderr: +syntax error, unexpected end of file +./calc.at:1337: cat stderr +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +stderr: +stderr: +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +syntax error, unexpected end of input +./calc.at:1336: $PREPARSER ./calc /dev/null +./calc.at:1341: $PREPARSER ./calc /dev/null +1.1: syntax error, unexpected end of file +./calc.at:1342: $PREPARSER ./calc /dev/null +input: +input: +input: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1360: cat stderr + | 1 = 2 = 3 +stderr: +./calc.at:1353: $PREPARSER ./calc input stderr: + | 1 = 2 = 3 + | 1 = 2 = 3 + | + | +1 + | 1 = 2 = 3 + | + | +1 + | error +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1357: $PREPARSER ./calc input ./calc.at:1351: $PREPARSER ./calc input -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: +syntax error stderr: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: $PREPARSER ./calc /dev/null +syntax error +./calc.at:1346: $PREPARSER ./calc /dev/null +syntax error, unexpected end of file +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +stderr: +syntax error +stderr: +stdout: +./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +stdout: +stderr: +./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stdout: +./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1362: cat stderr +stderr: +stderr: +stderr: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +stderr: +./calc.at:1337: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +stderr: +1.1: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +2.1: syntax error, unexpected '+' +2.1: syntax error, unexpected '+' +1.1: syntax error +./calc.at:1363: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. 
+ || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + Starting parse Entering state 0 Stack now 0 @@ -46836,94 +37665,70 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1350: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -input: -stderr: -stderr: - - | 1 + 2 * 3 + !+ ++ -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1354: $PREPARSER ./calc input +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -46938,79 +37743,127 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1364: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Stack now 0 8 19 +Error: popping token '=' (1.1: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.1: ) +Stack now 0 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +syntax error, unexpected end of input +./calc.at:1367: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1360: $PREPARSER ./calc input stderr: stderr: stderr: -1.6: syntax error: invalid character: '#' -input: +stderr: +syntax error +syntax error +syntax error Starting parse Entering state 0 Stack now 0 @@ -47021,7 +37874,7 @@ Stack now 0 4 Reading a token Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+1.2: syntax error Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -47029,7 +37882,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -47047,12 +37900,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -47062,18 +37915,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -47085,18 +37938,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -47109,7 +37962,7 @@ Stack now 0 8 21 4 12 21 Reading a token Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error Error: popping token '+' (1.17: ) Stack now 0 8 21 4 12 Error: popping nterm exp (1.7-15: 3) @@ -47121,7 +37974,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -47130,7 +37983,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -47148,7 +38001,7 @@ Stack now 0 8 21 4 Reading a token Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.23: syntax error Shifting token error (1.23: ) Entering state 11 Stack now 0 8 21 4 11 @@ -47180,7 +38033,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -47189,7 +38042,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -47206,12 +38059,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -47221,18 +38074,18 @@ Entering state 22 Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 107): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -47245,7 +38098,7 @@ Stack now 0 8 21 4 12 22 Reading a token Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.41: syntax error Error: popping token '*' (1.39: ) Stack now 0 8 21 4 12 Error: popping nterm exp (1.33-37: 2) @@ -47265,7 +38118,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -47274,7 +38127,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -47286,18 +38139,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) @@ -47309,19 +38162,79 @@ Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of inpStarting parse +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1//2 +stderr: +stderr: +stderr: +stderr: +stderr: +./calc.at:1362: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +Starting parse Entering state 0 Stack now 0 Reading a token @@ -47335,87 +38248,125 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +2.1: syntax error, unexpected '+' +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: cat stderr - | (1 + #) = 1111 -./calc.at:1350: $PREPARSER ./calc input +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Stack now 0 8 19 +Error: popping token '=' (1.1: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.1: ) +Stack now 0 stderr: -ut. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error, unexpected '+' stderr: -1.6: syntax error: invalid character: '#' +stderr: +1.1: syntax error +1.1: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +input: Starting parse Entering state 0 Stack now 0 @@ -47430,17 +38381,88 @@ Entering state 8 Stack now 0 8 Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1367: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1363: $PREPARSER ./calc input +stderr: +stderr: +stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) 
-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -47450,61 +38472,1013 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1355: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 
+Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack 
now 0 6 8 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 
+Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = 
nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = 
nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token 
"number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./types.at:139: ./check +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: cat stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -47514,6 +39488,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1348: cat stderr +stderr: +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -47524,7 +39532,7 @@ Stack now 0 4 Reading a token Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+1.2: syntax error Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -47532,7 +39540,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -47550,12 +39558,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -47565,18 +39573,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -47588,18 +39596,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -47612,7 +39620,7 @@ Stack now 0 8 21 4 12 21 Reading a token Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error Error: popping token '+' (1.17: ) Stack now 0 8 21 4 12 Error: popping nterm exp (1.7-15: 3) @@ -47624,7 +39632,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -47633,7 +39641,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -47651,7 +39659,7 @@ Stack now 0 8 21 4 Reading a token Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.23: syntax error Shifting token error (1.23: ) Entering state 11 Stack now 0 8 21 4 11 @@ -47683,7 +39691,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -47692,7 +39700,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -47709,12 +39717,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -47724,18 +39732,18 @@ Entering state 22 Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 107): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -47748,7 +39756,7 @@ Stack now 0 8 21 4 12 22 Reading a token Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.41: syntax error Error: popping token '*' (1.39: ) Stack now 0 8 21 4 12 Error: popping nterm exp (1.33-37: 2) @@ -47768,7 +39776,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -47777,7 +39785,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -47789,18 +39797,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) @@ -47812,30 +39820,50 @@ Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of inpstderr: -./calc.at:1353: $PREPARSER ./calc input +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -47845,12 +39873,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -47860,2346 +39888,1016 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1367: cat stderr -ut. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -./calc.at:1351: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1357: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 22 -Stack now 0 8 21 30 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Stack now 0 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Stack now 0 6 8 25 +Reducing 
stack by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -input: -stderr: -input: - | (1 + 1) / (1 - 1) -./calc.at:1351: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1//2 -./calc.at:1367: $PREPARSER ./calc input -stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11-17: error: null divisor -stderr: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1387: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -501. calc.at:1389: testing Calculator %glr-parser %header ... -./calc.at:1389: mv calc.y.tmp calc.y - -./calc.at:1354: cat stderr -1.11-17: error: null divisor -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1350: cat stderr -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -input: -./calc.at:1355: cat stderr -./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - | (!!) + (1 2) = 1 -./calc.at:1357: cat stderr -input: -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: cat stderr - | (# + 1) = 1111 -./calc.at:1350: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - 
$3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -./calc.at:1367: cat stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1357: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -stderr: -1.2: syntax error: invalid character: '#' - | (# + 1) = 1111 -stderr: -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1351: cat stderr -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 -Stack now 0 4 +Stack now 0 6 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) 
--> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | error -./calc.at:1367: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -483. 
calc.at:1351: ok -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.2: syntax error: invalid character: '#' -stderr: -Starting parse -Entering state 0 -Stack now 0 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 100): + $1 = 
nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1354: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: cat stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -input: -./calc.at:1357: cat stderr -stdout: - | (1 + # + 1) = 1111 -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1350: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1354: $PREPARSER ./calc input -input: -./calc.at:1364: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: cat stderr -./calc.at:1389: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: - | (#) + (#) = 2222 -./calc.at:1357: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 19 2 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) 
-Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1367: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1353: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token 
number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -502. calc.at:1390: testing Calculator %glr-parser %locations ... -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error 
(1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = 
nterm input (1.1-2.0: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (#) + (#) = 2222 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1390: mv calc.y.tmp calc.y - -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 Reading a token -Next token is token ')' (1.9: ) -Shifting 
token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (1 + # + 1) = 1111 -./calc.at:1353: $PREPARSER ./calc input -stderr: -./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 4 +Stack now 0 6 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 
= token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -stderr: +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -50517,28 +41215,7 @@ Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -Enter./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -ing state 25 +Entering state 25 Stack now 0 6 8 25 Reducing stack by rule 4 (line 84): $1 = nterm exp (4.1-9: -1) @@ -50858,7 +41535,8 @@ Reducing stack by rule 5 (line 88): $1 = token "number" (9.14: 4) -> $$ = nterm exp (9.14: 4) -Entering state 10 +Entering st./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr +ate 10 Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) @@ -51238,249 +41916,144 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +' expout || exit 77 +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1343: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1340: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1350: cat stderr -Starting parse -Entering state 0 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: -stderr: -./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1354: cat stderr Starting parse Entering state 0 Stack now 0 @@ -52498,53 +43071,89 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1367: cat stderr input: - | (1 + 1) / (1 - 1) -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1357: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1338: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1368: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: cat stderr +./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1344: cat stderr + | 1 2 stderr: -input: - | - | +1 - | (* *) + (*) + (*) ./calc.at:1367: $PREPARSER ./calc input -1.11-17: error: null divisor -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1364: $PREPARSER ./calc input -input: stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 input: +input: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: cat stderr +./calc.at:1351: cat stderr +./calc.at:1357: cat stderr ./calc.at:1355: cat stderr +./calc.at:1350: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1 2 +./calc.at:1343: $PREPARSER ./calc input stderr: +./calc.at:1364: $PREPARSER ./calc input +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: ./check +input: +input: +input: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: cat stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -52554,19 +43163,115 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 +./calc.at:1347: cat stderr +./calc.at:1336: cat stderr +input: +./calc.at:1341: cat stderr +./calc.at:1346: cat stderr +./calc.at:1354: cat stderr +stderr: + | 1 2 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1334: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1344: $PREPARSER ./calc input +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +stderr: +./calc.at:1350: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1351: $PREPARSER ./calc /dev/null +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +input: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1353: cat stderr +input: +./calc.at:1337: cat stderr +./calc.at:1360: cat stderr + | 1 = 2 = 3 | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 + | +1 | - | 2^2^3 = 256 - | (2^2)^3 = 64 + | +1 +stderr: +stderr: +stderr: +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1357: $PREPARSER ./calc input +stderr: +stderr: +stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: $PREPARSER ./calc input +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +1.1: syntax error, unexpected end of file +1.11: syntax error +1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +1.1: syntax error, unexpected end of file Starting parse Entering state 0 Stack now 0 @@ -52576,372 +43281,423 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) Stack now 0 8 21 4 -Shifting token error (1.10: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) Stack now 0 8 21 4 -Shifting token error (1.16: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (1 + #) = 1111 -stdout: -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1368: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -stderr: -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: -stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) 
Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: +input: +input: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1362: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | + | +1 stderr: - | (1 + #) = 1111 -./calc.at:1355: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1341: $PREPARSER ./calc input stderr: stderr: +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc input +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 
14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -52951,30 +43707,14 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1369: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1353: cat stderr -stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +Starting parse +Entering state 0 Stack now 0 Reading a token Next token is token "number" (1.1: 1) @@ -52993,7 +43733,138 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.3: 2) Stack now 0 +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+error: 4444 != 1 +stderr: +stdout: +./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error + | (!!) + (1 2) = 1 +stderr: + | + | +1 +./calc.at:1360: $PREPARSER ./calc input +stderr: +stderr: +stderr: +./calc.at:1353: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +./calc.at:1337: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1368: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+error: 4444 != 1 +1.1: syntax error, unexpected end of file +1.11: syntax error +1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -53003,95 +43874,384 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 
8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a 
token +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +1.1: syntax error, unexpected end of file +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | error +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1362: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -53101,160 +44261,229 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 21 4 -Shifting token error (1.10: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -53269,6 +44498,164 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: cat stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1364: cat stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +stderr: +stderr: +stderr: +stderr: +./calc.at:1367: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (1.1: ) +Stack now 0 +./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -53278,102 +44665,147 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> 
$$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 input: - | (1 + 1) / (1 - 1) - | 1 2 -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -53383,8 +44815,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: cat stderr -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: cat stderr +./calc.at:1363: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -53395,8 +44858,40 @@ }eg ' expout || exit 77 stderr: -./calc.at:1390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) + | (!!) + (1 2) = 1 +stderr: +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1348: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -53407,8 +44902,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: cat stderr +./calc.at:1350: cat stderr +./calc.at:1343: cat stderr + | 1//2 + | 1 2 +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1368: $PREPARSER ./calc input stderr: -./calc.at:1355: "$PERL" -pi -e 'use strict; +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +input: +input: +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -53418,158 +44925,369 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1338: cat stderr +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: cat stderr +./calc.at:1355: cat stderr +./calc.at:1344: cat stderr +./calc.at:1358: cat stderr + | 1//2 + | (!!) 
+ (1 2) = 1 +stderr: +./calc.at:1340: $PREPARSER ./calc input +stderr: +./calc.at:1363: $PREPARSER ./calc input +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): +Stack now 0 1 +Reducing stack by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +input: +input: +input: +./calc.at:1334: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (!!) 
+ (1 2) = 1 +./calc.at:1350: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: $PREPARSER ./calc input +syntax error +error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): +Stack now 0 1 +Reducing stack by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) Entering state 23 Stack now 0 8 23 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1357: $PREPARSER ./calc /dev/null +./calc.at:1355: $PREPARSER ./calc /dev/null +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +input: +input: +input: +./calc.at:1347: cat stderr +./calc.at:1346: cat stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 + | + | +1 +./calc.at:1360: cat stderr +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: $PREPARSER ./calc input +stderr: +./calc.at:1338: $PREPARSER ./calc input +stderr: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1344: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +syntax error, unexpected number +error: 2222 != 1 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1354: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): +Stack now 0 1 +Reducing stack by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +input: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: cat stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: cat stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: cat stderr +./calc.at:1362: cat stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: cat stderr + | (!!) + (1 2) = 1 +stderr: +stderr: +./calc.at:1334: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +syntax error, unexpected number +error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): +Stack now 0 1 +Reducing stack by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +syntax error +error: 2222 != 1 +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +input: +input: + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1367: cat stderr -./calc.at:1357: cat stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: cat stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (!!) + (1 2) = 1 + | 1 = 2 = 3 +./calc.at:1346: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1368: cat stderr stderr: stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: -482. calc.at:1350: ok -./calc.at:1367: $PREPARSER ./calc /dev/null -./calc.at:1364: cat stderr +stderr: +stderr: +./calc.at:1347: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +syntax error, unexpected number +error: 2222 != 1 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 @@ -53579,113 +45297,112 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' 
(1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) + $2 = token error (1.1: ) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 +Entering state 28 +Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: 
null divisor --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -53693,7 +45410,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) + $1 = nterm exp (1.1: 2222) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -53711,105 +45428,120 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -stderr: -./calc.at:1355: cat stderr -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 input: -stderr: +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1368: cat stderr - | (# + 1) = 1111 -./calc.at:1357: $PREPARSER ./calc input - | 1//2 -./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: +input: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (!!) + (1 2) = 1 + | (!!) + (1 2) = 1 + | 1 = 2 = 3 +stderr: + | (- *) + (1 2) = 1 + | (!!) 
+ (1 2) = 1 +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1341: $PREPARSER ./calc input stderr: -./calc.at:1364: $PREPARSER ./calc input stderr: - | (# + 1) = 1111 stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +stderr: +./calc.at:1353: $PREPARSER ./calc /dev/null +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +syntax error, unexpected number +error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -53818,49 +45550,15 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1355: $PREPARSER ./calc input -stderr: -input: -stdout: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -53871,158 +45569,74 @@ }eg ' expout || exit 77 input: -./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: cat stderr stderr: - | 1 + 2 * 3 + !+ ++ input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering 
state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) stderr: - - | 1 2 -./calc.at:1369: $PREPARSER ./calc input stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1//2 ./calc.at:1368: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: stderr: -stdout: -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1370: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - +syntax error +error: 2222 != 1 stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 + | error +syntax error +error: 2222 != 1 +syntax error +error: 2222 != 1 +./calc.at:1367: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 Starting parse Entering state 0 @@ -54033,93 +45647,145 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +1.4: syntax error +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error 
(1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: Starting parse Entering state 0 Stack now 0 @@ -54129,180 +45795,191 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' 
(1.1: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 
-Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1367: cat stderr +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -54317,104 +45994,36 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is 
token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stderr: -./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1374: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -input: -./calc.at:1364: "$PERL" -pi -e 'use strict; +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +1.11: syntax error +1.1-16: error: 2222 != 1 +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -54424,7 +46033,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -54434,11 +46043,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -484. calc.at:1353: stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1367: $PREPARSER ./calc input - ok -./calc.at:1357: cat stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -54449,59 +46065,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: + | (- *) + (1 2) = 1 +./calc.at:1348: $PREPARSER ./calc input stderr: -./calc.at:1364: cat stderr -./calc.at:1368: cat stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: +./calc.at:1363: cat stderr stderr: -input: -./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -stdout: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1374: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !- ++ stderr: -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (1 + # + 1) = 1111 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1355: cat stderr -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -./calc.at:1369: cat stderr stderr: -input: -input: stderr: - | error -./calc.at:1368: $PREPARSER ./calc input +syntax error +error: 2222 != 1 +syntax error +error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +syntax error +error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -54516,114 +46113,31 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span} ... - | error -./calc.at:1364: $PREPARSER ./calc input -./scanner.at:326: $PREPARSER ./input -./calc.at:1360: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -stderr: -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1391: mv calc.y.tmp calc.y - -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr - -stderr: -stdout: -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -stderr: -./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | (1 + # + 1) = 1111 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Error: popping nterm exp (1.1: 1) Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y Starting parse Entering state 0 Stack now 0 @@ -54633,327 +46147,654 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +1.4: syntax error +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.7: ) +Shifting token 
'+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) input: -./calc.at:1358: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1351: cat stderr +./calc.at:1350: cat stderr +./calc.at:1357: cat stderr +stderr: + | error +stderr: +./calc.at:1364: $PREPARSER ./calc input +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +' expout || exit 77 +input: +./calc.at:1340: cat stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr +./calc.at:1358: cat stderr +./calc.at:1354: cat stderr +./calc.at:1343: cat stderr +stderr: + | error +./calc.at:1363: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +input: +input: +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1338: cat stderr +input: +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1344: cat stderr + | (!!) + (1 2) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (!!) 
+ (1 2) = 1 +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $PREPARSER ./calc input +stderr: +./calc.at:1350: $PREPARSER ./calc input stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1358: $PREPARSER ./calc /dev/null +input: +input: +./calc.at:1368: cat stderr +input: +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +' expout || exit 77 +./calc.at:1360: cat stderr input: +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (- *) + (1 2) = 1 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1343: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1340: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: ./calc.at:1355: $PREPARSER ./calc input +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | 1//2 -./calc.at:1369: $PREPARSER ./calc input -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -stderr: - | 1 2 -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: $PREPARSER ./calc input -Starting 
parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -54971,11 +46812,39 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) input: -./calc.at:1360: $PREPARSER ./calc input -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr +input: +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: cat stderr +./calc.at:1334: cat stderr +./calc.at:1346: cat stderr +./calc.at:1367: cat stderr + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1344: $PREPARSER ./calc input stderr: stderr: -input: +stderr: +syntax error 
+syntax error +error: 2222 != 1 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +stderr: Starting parse Entering state 0 Stack now 0 @@ -54985,95 +46854,298 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 
+Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -55090,52 +47162,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] 
['!']) - | 1 2 -./calc.at:1367: cat stderr -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: $PREPARSER ./calc input -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -464. scanner.at:326: ok -syntax error -stderr: -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: Starting parse Entering state 0 Stack now 0 @@ -55145,95 +47171,298 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing 
stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -55250,14 +47479,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stdout: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: cat stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -55267,2154 +47493,2202 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -./types.at:139: $PREPARSER ./test - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: + | error +stderr: +./calc.at:1368: $PREPARSER ./calc input + | + | +1 +stderr: +./calc.at:1360: $PREPARSER ./calc input +stderr: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +1.2: syntax error +1.10: syntax error +1.16: syntax error stderr: -input: -syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.15: 1) +Shifting token number 
(1.15: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) +Reading a token +Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = nterm exp 
(1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack 
now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +input: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1336: cat stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1341: cat stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: cat stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: cat stderr +input: +input: +input: +input: +stderr: + | (- *) + (1 2) = 1 + | 1 = 2 = 3 + | + | +1 + | (- *) + (1 2) = 1 +stderr: + | (- *) + (1 2) = 1 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc input +stderr: +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1334: $PREPARSER ./calc input +stderr: +./calc.at:1362: $PREPARSER ./calc input +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) Entering state 3 -Stack now 0 6 3 +Stack now 0 3 Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 
6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 +Stack now 0 8 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Stack now 0 8 21 4 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 4 2 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 
12 21 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 
1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 
4 12 22 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: 
) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +syntax error +syntax error +error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - 
$1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) 
-Entering state 28 -Stack now 0 6 8 19 28 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 4 12 24 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a 
token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1368: cat stderr +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) stderr: -./calc.at:1354: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1367: $PREPARSER ./calc input stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1348: cat stderr +stderr: +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stderr: +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 
-Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp 
(1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +1.2: syntax error +1.10: syntax error +1.16: syntax error +input: +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1364: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (- *) + (1 2) = 1 + | (- *) + (1 2) = 1 +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1341: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +stderr: +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Return for a new token: +Next token is token '*' (1.2: ) +1.2: syntax error +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Return for a new token: +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Return for a new token: +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Return for a new token: +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) 
+Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: +Next token is token '*' (1.16: ) +1.16: syntax error +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) 
Entering state 6 Stack now 0 6 -Return for a new token: Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) Entering state 3 -Stack now 0 6 3 +Stack now 0 3 Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Return for a new token: -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Return for a new token: -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Return for a new token: Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Return for a new token: +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +syntax error +syntax error +error: 2222 != 1 +stderr: +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +syntax error +error: 2222 != 1 +syntax error +syntax error +error: 2222 != 1 +stderr: +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Stack now 0 4 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Stack now 0 8 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Return for a new token: +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 
2 -Stack now 0 6 4 2 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Return for a new token: -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Return for a new token: +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Return for a new token: +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 19 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) 
- $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) Entering state 3 -Stack now 0 6 3 +Stack now 0 3 Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Return for a new token: -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Return for a new token: Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Return for a new token: +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: 
) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token 
is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = ./calc.at:1357: cat stderr -nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - 
$1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 8 20 4 -Return for a new token: +Stack now 0 8 21 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 6 8 20 4 12 -Return for a new token: +Stack now 0 8 21 4 12 Reading a 
token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Return for a new token: +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Return for a new token: +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: +Stack now 0 8 21 4 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 19 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 6 8 19 28 -Return 
for a new token: +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 -Return for a new token: -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Return for a new token: Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Return for a new token: +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Return for a new token: +Next token is token '*' (1.2: ) +1.2: syntax error +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Return for a new token: +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: +Stack now 0 8 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack 
now 0 8 21 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 6 4 -Return for a new token: -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Return for a new token: +Stack now 0 8 21 4 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Return for a new token: +Next token is token '*' (1.10: ) +1.10: syntax error +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Return for a new token: +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Stack now 0 6 8 -Return for a new token: -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Return for a new token: +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Return for a new token: +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Next token is token '*' (1.16: ) +1.16: syntax error +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) 
+Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
-Shifting token end of file (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1364: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +input: +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -57424,2155 +49698,1950 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -input: +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: cat stderr +./calc.at:1351: cat stderr +./calc.at:1363: cat stderr +./calc.at:1357: cat stderr +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr | 1 = 2 = 3 + | (* *) + (*) + (*) +./calc.at:1358: cat stderr +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: cat stderr +stderr: +stderr: +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +stderr: +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error +syntax error +error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token "number" (1.1: 1) 
+Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering 
state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing 
stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> 
$$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +syntax error +syntax error +error: 2222 != 1 +syntax error +syntax error +error: 2222 != 1 +input: +input: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1368: cat stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +' expout || exit 77 +./calc.at:1343: cat stderr + | (!!) + (1 2) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (- *) + (1 2) = 1 + | 1 = 2 = 3 + | (!!) + (1 2) = 1 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1367: cat stderr +./calc.at:1340: cat stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1354: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 
(line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 19 28 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Stack now 0 8 21 4 12 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 
2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 
-Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Stack now 0 8 21 4 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 6 4 12 24 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 
64) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -input: -./calc.at:1369: cat stderr -input: -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1354: $PREPARSER ./calc input -input: -504. calc.at:1392: testing Calculator %glr-parser %name-prefix "calc" ... -./calc.at:1392: mv calc.y.tmp calc.y - -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1355: cat stderr -./calc.at:1368: $PREPARSER ./calc input +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is 
token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Return for a new token: Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 Return for a new token: Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 Return for a new token: Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 Return for a new token: Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 Return for a new token: Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 4 12 21 Return for a new token: Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 8 21 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (1.11: 1) +-> $$ = nterm 
exp (1.11: 1) Entering state 30 -Stack now 0 6 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 +Stack now 0 8 21 4 12 21 30 Return for a new token: Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Return for a new token: Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Return for a new token: -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 21 4 12 21 30 Return for a new token: Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Return for a new token: Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 Return for a new token: Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Return for a new token: Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Return for a new token: Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 Return for a new token: Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Return for a new token: Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 Return for a new token: Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token 
'=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 Return for a new token: Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Return for a new token: Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 4 2 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Return for a new token: Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Return for a new token: Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 Return for a new token: Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Return for a new token: Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Return for a new token: Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Return for a new token: Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line 
(6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1338: cat stderr +./calc.at:1360: cat stderr +./calc.at:1362: cat stderr +./calc.at:1344: cat stderr + | (* *) + (*) + (*) +./calc.at:1343: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1368: $PREPARSER ./calc input +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Return for a new token: +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Return for a new token: +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 6 2 2 2 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Return for a new token: + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Stack now 0 8 19 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm 
exp (7.8-9: -1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1337: cat stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: cat stderr +./calc.at:1346: cat stderr + | + | +1 + | (* *) + (*) + (*) +stderr: +stderr: +stderr: +stderr: +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1362: $PREPARSER ./calc /dev/null +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1360: $PREPARSER ./calc /dev/null +stderr: +stderr: +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: +Stack now 0 8 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Stack now 0 8 19 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token 
number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Return for a new token: Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Return for a new token: Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 Return for a new token: Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 21 4 Return for a new token: Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 8 21 4 12 Return for a new token: Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Return for a new token: Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Return for a new token: Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Return for a new token: Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack 
now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Return for a new token: Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Return for a new token: Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 Return for a new token: Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Return for a new token: -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Return for a new token: Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Return for a new token: Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 Return for a new token: Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Return for a new token: Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 8 21 4 Return for a new token: Reading a token -Next token is token number (13.2: 2) -Shifting token 
number (13.2: 2) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 21 4 12 Return for a new token: Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Return for a new token: Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 4 12 24 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Return for a new token: Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Return for a new token: Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 Return for a new token: Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Return for a new token: Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Return for a new token: Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.47-2.0: stderr: +./calc.at:1367: $PREPARSER ./calc input +) Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Return for a new token: Reading a token Now at end of input. 
-Shifting token end of file (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: stderr: - | (1 + 1) / (1 - 1) -./calc.at:1357: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1374: "$PERL" -pi -e 'use strict; +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -59582,108 +51651,193 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -input: -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1370: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token 
'!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 - | error -stderr: -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1360: $PREPARSER ./calc input -input: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -input: - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1355: $PREPARSER ./calc input -stderr: +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 stderr: - | 1 2 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +syntax error +syntax error +syntax error +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -59693,122 +51847,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '!' (1.2: ) +Shifting token '!' 
(1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): 
$1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -59825,69 +51978,78 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: cat stderr +input: +input: +input: +input: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: cat stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | (* *) + (*) + (*) + | 1 + 2 * 3 + !+ ++ + | (* *) + (*) + (*) + | (* *) + (*) + (*) +./calc.at:1347: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1346: $PREPARSER ./calc input +stderr: +./calc.at:1337: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -./calc.at:1358: $PREPARSER ./calc input -input: -stderr: - | 1//2 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 +./calc.at:1341: cat stderr +./calc.at:1336: cat stderr +./calc.at:1342: cat stderr +./calc.at:1364: cat stderr +./calc.at:1353: cat stderr +stderr: +stderr: +stderr: ./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stderr: +stderr: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -59897,12 +52059,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -59912,18 +52074,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -59931,7 +52093,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 
(line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -59948,58 +52110,135 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 129): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.2: syntax error +1.10: syntax error +1.16: syntax error +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +syntax error +syntax error +syntax error +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !+ ++ + | (* *) + (*) + (*) +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1334: $PREPARSER ./calc input +stderr: +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +input: +Starting parse +Entering state 0 Stack now 0 -./calc.at:1374: cat stderr +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (!!) 
+ (1 2) = 1 +stderr: +stderr: +stderr: +./calc.at:1353: $PREPARSER ./calc input +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -60010,311 +52249,264 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) 
-Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1368: cat stderr -stderr: -stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1367: cat stderr -./calc.at:1391: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +1.2: syntax error +1.10: syntax error +1.16: syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 30 Reading a token -Next token is token '-' (1.14: ) 
-Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.2: syntax error +1.10: syntax error +1.16: syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +input: +./calc.at:1368: cat stderr +./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: cat stderr +./calc.at:1358: cat stderr +./calc.at:1354: cat stderr +./calc.at:1355: cat stderr +./calc.at:1350: cat stderr +./calc.at:1351: cat stderr +./calc.at:1357: cat stderr + | (* *) + (*) + (*) + | (* *) + (*) + (*) +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1336: $PREPARSER ./calc input + | + | +1 + | (* *) + (*) + (*) +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1341: $PREPARSER ./calc input stderr: stderr: +./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -60324,172 +52516,138 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' 
(1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 
+Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc} ... 
-Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -input: -input: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 - | - | +1 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: cat stderr -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1393: mv calc.y.tmp calc.y - -./calc.at:1354: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -60499,9 +52657,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; +input: +./calc.at:1367: cat stderr +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -60511,7 +52670,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -60521,55 +52680,201 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: + | 1 + 2 * 3 + !- ++ +stderr: +stderr: +./calc.at:1338: $PREPARSER ./calc input +stderr: +stderr: +syntax error +syntax error +syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: +syntax error +syntax error +syntax error +syntax error +syntax error +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token 
is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: - | (- *) + (1 2) = 1 -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) 
+Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) input: -./calc.at:1360: "$PERL" -pi -e 'use strict; +input: +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -60579,10 +52884,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -60592,27 +52896,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr -stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -stderr: -./calc.at:1358: cat stderr -./calc.at:1360: cat stderr -stderr: -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1355: cat stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1357: cat stderr -syntax error input: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; +input: +input: +input: +input: +input: +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -60622,29 +52912,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1370: cat stderr - | (#) + (#) = 2222 input: +input: +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 + | (- *) + (1 2) = 1 + | (!!) + (1 2) = 1 + | 1 + 2 * 3 + !- ++ + | (- *) + (1 2) = 1 +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1355: $PREPARSER ./calc input + | + | +1 ./calc.at:1354: $PREPARSER ./calc input -486. calc.at:1355: input: - ok - | 1//2 -stderr: -487. 
calc.at:1357: stderr: + | (* *) + (*) + (*) ./calc.at:1358: $PREPARSER ./calc input - | 1//2 -./calc.at:1360: $PREPARSER ./calc input -stderr: - ok -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1364: cat stderr -./calc.at:1368: cat stderr -./calc.at:1392: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | (* *) + (*) + (*) stderr: +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1348: $PREPARSER ./calc input + | + | +1 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: $PREPARSER ./calc input stderr: +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1367: $PREPARSER ./calc /dev/null +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -60654,154 +52951,195 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' (1.1: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token 
-Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: -./calc.at:1368: $PREPARSER ./calc /dev/null -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr +./calc.at:1343: cat stderr +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1375: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Starting parse Entering state 0 Stack now 0 @@ -60811,102 +53149,129 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 
Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -60923,208 +53288,159 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -input: - | error -input: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Return for a new token: Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Return for a new token: Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1375: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | - | +1 -stderr: -./calc.at:1369: cat stderr -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -stderr: - -./calc.at:1367: cat stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) - -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -input: -./calc.at:1374: cat stderr -./calc.at:1367: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: -stderr: -./calc.at:1354: cat stderr +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: - | - | +1 -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -61150,50 +53466,89 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1358: cat stderr -stderr: -./calc.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -./calc.at:1368: cat stderr -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -input: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | (1 + #) = 1111 -./calc.at:1360: cat stderr -stderr: -input: - | error -stdout: -./calc.at:1358: $PREPARSER ./calc input -stderr: -./calc.at:1354: $PREPARSER ./calc input -input: -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1371: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -61203,79 +53558,129 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + 
$1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -61292,31 +53697,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1374: $PREPARSER ./calc input -input: -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1370: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -stderr: -stderr: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error +syntax error +syntax error Starting parse Entering state 0 Stack now 0 @@ -61326,79 +53709,129 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 
Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -61415,53 +53848,13 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1371: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - syntax error -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 - | error -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1364: cat stderr -stderr: -input: -506. calc.at:1394: testing Calculator %glr-parser %verbose ... -./calc.at:1394: mv calc.y.tmp calc.y - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 - | 1 = 2 = 3 -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: -stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +syntax error +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -61471,70 +53864,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1364: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1367: cat stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: cat stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: cat stderr stderr: -507. 
calc.at:1395: testing Calculator %glr-parser parse.error=verbose ... -./calc.at:1395: mv calc.y.tmp calc.y - -./calc.at:1369: cat stderr -stderr: -syntax error - | 1 2 -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1354: cat stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr input: -stderr: -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1369: $PREPARSER ./calc /dev/null -./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 input: -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1347: cat stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -61545,42 +53892,25 @@ }eg ' expout || exit 77 | 1 + 2 * 3 + !+ ++ +./calc.at:1344: cat stderr +./calc.at:1343: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.3: syntax error, unexpected number -input: -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1338: cat stderr stderr: stderr: +./calc.at:1360: $PREPARSER ./calc input stderr: stderr: -./calc.at:1368: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 stderr: stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: Starting parse Entering state 0 Stack now 0 @@ -61590,76 +53920,129 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is 
token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -61676,23 +54059,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error, unexpected number -./calc.at:1358: cat stderr -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -61702,76 +54068,129 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token 
'\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -61788,247 +54207,455 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stdout: -input: -./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1360: cat stderr -./types.at:139: $PREPARSER ./test -./calc.at:1370: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1368: $PREPARSER ./calc input -stderr: -stdout: -./calc.at:1364: cat stderr -./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | 1 = 2 = 3 -stderr: -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1369: cat stderr -stderr: -./calc.at:1374: cat stderr -input: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1362: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -input: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1367: $PREPARSER ./calc input - | - | +1 -./calc.at:1370: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1360: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Return for a new token: +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Return for a new token: Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Return for a new token: Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Return for a new token: Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm 
exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -input: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -input: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1364: $PREPARSER ./calc input - | 1 2 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1371: $PREPARSER ./calc input -input: -stderr: +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - 
| ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1362: $PREPARSER ./calc input -stderr: - | 1 = 2 = 3 -416. types.at:139: ok +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1374: $PREPARSER ./calc input | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1369: $PREPARSER ./calc input -stderr: -stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1348: cat stderr Starting parse Entering state 0 Stack now 0 @@ -62047,7 +54674,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -62065,12 +54692,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -62080,18 +54707,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -62103,18 +54730,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token 
number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -62139,7 +54766,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -62148,7 +54775,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -62198,7 +54825,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -62207,7 +54834,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -62224,12 +54851,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -62239,18 +54866,18 @@ Entering state 22 Stack now 0 8 21 4 12 22 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -62283,7 +54910,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -62292,7 +54919,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -62304,18 +54931,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.46: 
1) -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): +Reducing stack by rule 6 (line 102): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) @@ -62327,68 +54954,29 @@ Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: input: -./calc.at:1368: cat stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +input: +input: +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -62398,21 +54986,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1354: $PREPARSER ./calc input -syntax error -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -62423,9 +55018,19 @@ }eg ' expout || exit 77 stderr: + | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 + !+ ++ +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1344: $PREPARSER ./calc input stderr: +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -62435,95 +55040,298 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 
101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -62540,101 +55348,394 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1375: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1368: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: cat stderr +input: stderr: -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error +stderr: +stderr: +stderr: + | 1 + 2 * 3 + !* ++ +stderr: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 22 -Stack now 0 8 21 30 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token 
'=' (1.11: ) +Next token is token '*' (1.39: ) Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) +Reading a token +Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -62644,927 +55745,3281 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Now at end of inp./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr +ut. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1364: $PREPARSER ./calc /dev/null +./calc.at:1368: $PREPARSER ./calc /dev/null +./calc.at:1337: cat stderr +input: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: cat stderr +./calc.at:1367: cat stderr +input: +./calc.at:1363: cat stderr +./calc.at:1354: cat stderr +./calc.at:1355: cat stderr +./calc.at:1353: cat stderr +./calc.at:1351: cat stderr +./calc.at:1358: cat stderr +./calc.at:1357: cat stderr + | 1 + 2 * 3 + !+ ++ +stderr: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1334: $PREPARSER ./calc input +stderr: +./calc.at:1350: cat stderr +./calc.at:1343: $PREPARSER ./calc input +stderr: +stderr: +1.14: memory exhausted +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 8 21 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token 
'+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 2 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 
4 12 22 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 2 10 24 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (1.1-42: 4444) + $2 = 
token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1336: cat stderr +./calc.at:1341: cat stderr +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !* ++ +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1337: $PREPARSER ./calc input +stderr: +./calc.at:1363: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +1.14: memory exhausted +input: +input: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +input: +input: +input: +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +input: +input: + | 1 + 2 * 3 + !- ++ + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (* *) + (*) + (*) + | 1 + 2 * 3 + !- ++ + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !+ ++ + | (- *) + (1 2) = 1 +./calc.at:1367: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1357: $PREPARSER ./calc input + | (* *) + (*) + (*) +stderr: +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc input +stderr: +./calc.at:1350: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1346: $PREPARSER ./calc input +stderr: +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +1.14: memory exhausted +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 + 2 * 3 + !+ ++ +stderr: + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1341: $PREPARSER ./calc input +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stderr: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) Entering state 2 -Stack now 0 6 4 2 +Stack now 0 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 21 4 1 
+Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) +Now at 
end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) Entering state 2 -Stack now 0 6 2 2 2 +Stack now 0 4 2 +Return for a new token: Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 6 2 2 2 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1.17: 1) +Shifting 
token number (1.17: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1343: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: cat stderr +stdout: +stderr: +stderr: +stderr: +stderr: +stderr: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' 
(1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1368: cat stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1362: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1343: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Return for a new token: +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 +Return for a new token: Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Stack now 0 8 21 4 12 +Return for a new token: Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (10.13: ) -Reducing stack by 
rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 
+Stack now 0 8 19 28 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +input: +input: +input: +input: + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !- ++ + | (!!) + (1 2) = 1 +./calc.at:1334: $PREPARSER ./calc input +stderr: +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1360: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1342: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +memory exhausted +./calc.at:1344: cat stderr +./calc.at:1340: cat stderr +./calc.at:1347: cat stderr +input: +input: +input: +input: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1338: cat stderr +./calc.at:1364: cat stderr +./calc.at:1348: cat stderr + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !- ++ + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1 + 2 * 3 + !- ++ +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1336: $PREPARSER ./calc input +stderr: +./calc.at:1351: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1341: $PREPARSER ./calc input +stderr: +stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1362: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 6 8 24 33 24 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token 
'\n' (12.12-13.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !* ++ +stderr: +stderr: +./calc.at:1344: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +stderr: +./calc.at:1340: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1347: $PREPARSER ./calc input +stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +1.14: memory exhausted +stderr: +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +memory exhausted +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 6 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering 
state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 4 12 24 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1363: cat stderr +stderr: + | (#) + (#) = 2222 + | (#) + (#) = 2222 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1338: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1364: $PREPARSER ./calc input +1.14: memory exhausted +stderr: +stderr: +stderr: +stderr: +stderr: +memory exhausted +memory exhausted +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '=' (1.14: ) 
+Shifting token '=' (1.14: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) - | (- *) + (1 2) = 1 -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1360: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1350: cat stderr +./calc.at:1342: cat stderr +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -63574,6 +59029,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1337: cat stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr +./calc.at:1355: cat stderr +./calc.at:1358: cat stderr +./calc.at:1357: cat stderr +stderr: +stderr: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +1.14: memory exhausted +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 @@ -63883,43 +59362,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -stderr: - -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr -stderr: -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: -./calc.at:1367: cat stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -stderr: -Starting parse +Reading aStarting parse Entering state 0 Stack now 0 Reading a token @@ -63928,95 +59371,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -64033,1057 +59502,742 @@ Stack now 0 6 
17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: cat stderr + token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1341: cat stderr +input: +input: +input: +input: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 + | 1 + 2 * 3 + !* ++ +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1 + 2 * 3 + !* ++ +./calc.at:1363: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1350: $PREPARSER ./calc input +memory exhausted +stderr: +1.14: memory exhausted +memory exhausted +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1351: cat stderr +./calc.at:1336: cat stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1353: cat stderr +input: +input: + | 1 + 2 * 3 + !+ ++ +stderr: + | 1 + 2 * 3 + !+ ++ + | (#) + (#) = 2222 + | (* *) + (*) + (*) +stderr: +./calc.at:1358: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc input +stderr: +./calc.at:1355: $PREPARSER ./calc input +stderr: +stderr: +stderr: +memory exhausted +1.14: memory exhausted Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token 
'+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 -Stack now 0 6 8 21 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token 
'=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Stack now 0 8 21 4 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm 
exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token '-' 
(7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) 
+Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering 
state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) 
-Entering state 28 -Stack now 0 6 8 19 28 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 12 +Next token is token '*' 
(1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering 
state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1334: cat stderr input: -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -./calc.at:1370: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -65093,28 +60247,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -./calc.at:1375: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1363: $PREPARSER ./calc input -stderr: -./calc.at:1369: cat stderr -input: -stderr: - | - | +1 -./calc.at:1371: cat stderr -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1370: $PREPARSER ./calc /dev/null -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" @@ -65123,48 +60257,139 @@ ' expout || exit 77 input: input: -./calc.at:1364: cat stderr + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !* ++ +./calc.at:1341: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1336: $PREPARSER ./calc input stderr: -memory exhausted -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
- | 1 2 -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | - | +1 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1374: cat stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +./calc.at:1351: $PREPARSER ./calc input +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 
0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -stderr: +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -65179,33 +60404,82 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -stderr: -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -input: - | (!!) + (1 2) = 1 -memory exhausted -./calc.at:1368: cat stderr +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -65215,12 +60489,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -65230,1028 +60504,1118 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 101): +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 -Stack now 0 6 8 21 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) 
-Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 +Stack now 0 8 +Return for a new token: Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1360: cat stderr +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1343: cat stderr +stderr: + | (* *) + (*) + (*) +stderr: +./calc.at:1353: $PREPARSER ./calc input +stderr: + | 1 + 2 * 3 + !* ++ +stderr: +stderr: +stderr: +./calc.at:1334: $PREPARSER ./calc input +1.14: memory exhausted +memory exhausted +memory exhausted +memory exhausted +1.14: memory exhausted +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 -Stack now 0 6 2 2 2 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering 
state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp 
(1.33: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +stderr: +stdout: +./types.at:139: ./check +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +input: +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1362: cat stderr +' expout || exit 77 + | (!!) 
+ (1 2) = 1 +./calc.at:1368: $PREPARSER ./calc input +stderr: +stderr: +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) 
-Entering state 28 -Stack now 0 6 8 19 28 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 +Stack now 0 4 +Return for a new token: Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 +Return for a new token: Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
-Shifting token "end of input" (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1364: $PREPARSER ./calc input +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing 
stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1354: cat stderr -stderr: - | 1//2 -stderr: -stderr: -./calc.at:1371: $PREPARSER ./calc input -stderr: -stderr: +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) Starting parse Entering state 0 Stack now 0 @@ -66266,22 +61630,82 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -66291,12 +61715,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -66306,62 +61730,157 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 101): +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -66371,925 +61890,1381 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 -Stack now 0 6 8 21 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (#) + (#) = 2222 + | (- *) + (1 2) = 1 +stderr: +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1343: $PREPARSER ./calc input +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +stderr: +stderr: +memory exhausted +1.14: memory exhausted +memory exhausted +437. types.at:139: ok + +495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1369: mv calc.y.tmp calc.y + +./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr +input: +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: cat stderr +./calc.at:1344: cat stderr +./calc.at:1348: cat stderr + | (- *) + (1 2) = 1 +stderr: +./calc.at:1362: $PREPARSER ./calc input +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 4 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" 
(4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm 
exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: +stdout: +stderr: +./types.at:139: $PREPARSER ./test +stdout: +stderr: +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test +input: +input: +input: +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: cat stderr +./calc.at:1338: cat stderr +./calc.at:1367: cat stderr +input: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !- ++ +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1355: $PREPARSER ./calc input +stderr: + | (#) + (#) = 2222 +./calc.at:1346: $PREPARSER ./calc input +stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' 
(9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +input: +input: +input: +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (#) + (#) = 2222 + | (1 + #) = 1111 +stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1348: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1350: cat stderr +stderr: +./calc.at:1363: cat stderr +stderr: +stderr: +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1344: $PREPARSER ./calc input +stderr: +./calc.at:1347: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Stack now 0 8 21 4 12 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp 
(10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Now at end of 
input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +======== Testing with C++ standard flags: '' +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +======== Testing with C++ standard flags: '' +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8input: - +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1358: cat stderr + | (1 + #) = 1111 + | (- *) + (1 2) = 1 + | (#) + (#) = 2222 +stderr: +./calc.at:1338: $PREPARSER ./calc input +stderr: +./calc.at:1340: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +stderr: +./calc.at:1367: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +======== Testing with C++ standard flags: '' +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 
by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -67430,92 +63405,480 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1395: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test stderr: - | - | +1 -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: cat stderr input: - | (* *) + (*) + (*) +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1368: cat stderr +./calc.at:1342: cat stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1351: cat stderr +stderr: + | (!!) 
+ (1 2) = 1 +stderr: + | (#) + (#) = 2222 +./types.at:139: $PREPARSER ./test +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1350: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +stderr: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: $PREPARSER ./calc input -syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: $PREPARSER ./test +input: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1 + 2 * 3 + !+ ++ +stderr: +stderr: +./calc.at:1358: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack 
now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -stderr: -./calc.at:1367: cat stderr -508. calc.at:1397: testing Calculator %glr-parser api.pure %locations ... - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1354: $PREPARSER ./calc input -stderr: -./calc.at:1370: cat stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -67656,8 +64019,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67667,9 +64031,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67679,7 +64041,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67689,8 +64051,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; +input: +./calc.at:1336: cat stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67700,11 +64065,11 @@ : "syntax error, unexpected 
$unexp"; }eg ' expout || exit 77 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -input: +./calc.at:1341: cat stderr input: -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67714,372 +64079,117 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -stderr: input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) 
-1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS | (#) + (#) = 2222 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1367: $PREPARSER ./calc input - | 1 2 -./calc.at:1397: mv calc.y.tmp calc.y - + | (#) + (#) = 2222 + | (1 + #) = 1111 +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +======== Testing with C++ standard flags: '' +stderr: +======== Testing with C++ standard flags: '' +./calc.at:1368: $PREPARSER ./calc input stderr: -./calc.at:1362: cat stderr -./calc.at:1358: cat stderr stderr: -./calc.at:1360: cat stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 12 21 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering 
state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Return for a new token: Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1360: $PREPARSER ./calc /dev/null -./calc.at:1364: cat stderr -stderr: -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Return for a new token: +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +438. types.at:139: ./calc.at:1354: cat stderr syntax error: invalid character: '#' syntax error: invalid character: '#' -./calc.at:1368: cat stderr -input: -./calc.at:1369: cat stderr - | 1//2 -./calc.at:1362: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./calc.at:1375: cat stderr -stderr: -./calc.at:1358: $PREPARSER ./calc /dev/null -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1371: cat stderr -input: -input: -./calc.at:1354: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' + ok +./calc.at:1357: cat stderr +./calc.at:1355: cat stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -68089,59 +64199,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1369: $PREPARSER ./calc input +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: cat stderr +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1353: cat stderr +./calc.at:1360: cat stderr stderr: - | (- *) + (1 2) = 1 -./calc.at:1364: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: 1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) 1.1-17: error: 2222 != 1 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -input: stderr: -./calc.at:1370: cat stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -68151,129 +64229,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 21 4 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token 
'=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -68290,91 +64360,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: cat stderr -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !+ ++ -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: $PREPARSER ./calc input -stderr: -input: -stderr: -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -stderr: -stderr: -input: -485. calc.at:1354: ok Starting parse Entering state 0 Stack now 0 @@ -68384,129 +64369,79 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: 
) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -68523,8 +64458,108 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Return for a new token: +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Return for a new token: +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' 
(1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Return for a new token: +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) input: -./calc.at:1358: "$PERL" -pi -e 'use strict; +input: +input: +input: +input: +input: +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -68534,26 +64569,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error - | error -./calc.at:1371: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 - | (!!) + (1 2) = 1 -./calc.at:1370: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1375: $PREPARSER ./calc input -stderr: -stderr: -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; +input: +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -68563,28 +64581,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1358: cat stderr -./calc.at:1374: cat stderr -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: cat stderr +./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1362: cat stderr + | (#) + (#) = 2222 + | (#) + (#) = 2222 + | (#) + (#) = 2222 + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !* ++ +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1334: $PREPARSER ./calc input +./calc.at:1341: 
$PREPARSER ./calc input +./calc.at:1357: $PREPARSER ./calc input my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1364: "$PERL" -pi -e 'use strict; + +./calc.at:1346: cat stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -68594,414 +64621,615 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - -syntax error: invalid character: '#' -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1397: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | (* *) + (*) + (*) stderr: -./calc.at:1374: $PREPARSER ./calc /dev/null stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.1: syntax error, unexpected invalid token -./calc.at:1364: cat stderr +496. calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full ... +./calc.at:1360: $PREPARSER ./calc input stderr: -input: -./calc.at:1363: cat stderr -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1362: cat stderr -./calc.at:1369: cat stderr -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1360: cat stderr stderr: - | 1 + 2 * 3 + !- ++ stderr: stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1370: cat stderr -input: +./calc.at:1370: mv calc.y.tmp calc.y + stderr: - | 1//2 +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Return for a new token: + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: +Stack now 0 8 21 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Return for a new token: +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: +Stack now 0 8 21 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 -Return for a new token: +Stack now 0 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +syntax error: invalid character: '#' +Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Stack now 0 8 21 4 -Shifting token error (1.23-25: ) +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 8 21 4 -Shifting token error (1.23-27: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '=' 
(1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 8 21 4 12 22 -Return for a new token: +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 -Return for a new token: +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '*' (1.39: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Return for a new token: +Stack now 0 4 12 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token 
Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -syntax error -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: $PREPARSER ./calc input -input: +./calc.at:1367: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (* *) + (*) + (*) -./calc.at:1364: $PREPARSER ./calc input - | error -stderr: -stderr: -./calc.at:1362: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1360: $PREPARSER ./calc input -stderr: +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -69011,47 +65239,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -input: -stderr: +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: cat stderr +./calc.at:1348: cat stderr + | (1 + #) = 1111 | (* *) + (*) + (*) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 + | 1 + 2 * 3 + !- ++ +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input stderr: -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1371: cat stderr Starting parse Entering state 0 Stack now 0 @@ -69086,7 +65284,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -69121,7 +65319,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -69130,7 +65328,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -69164,7 +65362,7 @@ Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -69173,7 +65371,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -69184,760 +65382,315 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: - | (- *) + (1 2) = 1 -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +input: +./calc.at:1347: cat stderr +./calc.at:1344: cat stderr + | (1 + #) = 1111 +stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: stderr: +stderr: +stderr: +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1368: cat stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Return for a new token: Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 8 21 30 Return for a new token: Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Return for a new token: Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Return for a new token: Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 Return for a new token: Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Return for a new token: Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 8 21 4 12 22 -Return for a new token: +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 -Return for a new token: -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Return for a new token: -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1367: cat stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1375: cat stderr -./calc.at:1368: cat stderr +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 
8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 
-Stack now 0 8 21 4 12 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of inp./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -ut. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations ... -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1398: mv calc.y.tmp calc.y - -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -69972,7 +65725,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -70007,7 +65760,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -70016,7 +65769,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -70050,7 +65803,7 @@ Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -70059,7 +65812,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -70070,518 +65823,29 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -stderr: -stderr: - | (# + 1) = 1111 -input: - | 1 = 2 = 3 -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of inp1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -ut. +Now at end of input. Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !* ++ -./calc.at:1368: $PREPARSER ./calc input -stderr: - | 1 = 2 = 3 -./calc.at:1375: $PREPARSER ./calc input -stderr: syntax error: invalid character: '#' -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr -./calc.at:1363: cat stderr -1.14: memory exhausted -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: cat stderr -./calc.at:1370: cat stderr -stderr: -stderr: -./calc.at:1374: cat stderr -stderr: -./calc.at:1362: cat stderr -1.7: syntax error, unexpected '=' -./calc.at:1369: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error: invalid character: '#' -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -input: -input: - | 1 = 2 = 3 -input: -stderr: -./calc.at:1362: $PREPARSER ./calc input -stderr: - | error -1.7: syntax error, unexpected '=' -./calc.at:1363: $PREPARSER ./calc input -input: -input: - | 1 + 2 * 3 + !+ ++ -input: -stderr: -./calc.at:1371: cat stderr -input: -1.14: memory exhausted - | (!!) + (1 2) = 1 - | (* *) + (*) + (*) -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1358: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 - | 1 + 2 * 3 + !+ ++ -./calc.at:1364: $PREPARSER ./calc input -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1360: cat stderr -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1369: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: 
+syntax error: invalid character: '#' +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -70590,166 +65854,136 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Return for a new token: Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +syntax error: invalid character: '#' +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = 
nterm exp (1.1-12: 2222) +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1367: cat stderr -input: +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +./calc.at:1350: cat stderr +./calc.at:1363: cat stderr stderr: -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -70759,12 +65993,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -70774,18 +66008,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -70793,7 +66027,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -70810,263 +66044,18 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1368: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Return for a new token: -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) 
-Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -input: -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 - | (!!) 
+ (1 2) = 1 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1360: $PREPARSER ./calc input -input: - | (#) + (#) = 2222 -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1375: cat stderr -stderr: -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -71075,86 +66064,87 @@ Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): +Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 14 Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) +Reducing stack by rule 17 (line 122): + $1 = token '!' 
(1.1: ) + $2 = token '+' (1.1: ) Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) stderr: -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stdout: +stdout: Starting parse Entering state 0 Stack now 0 @@ -71164,121 +66154,132 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -71295,21 +66296,31 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1367: $PREPARSER ./calc input -stderr: -./calc.at:1370: cat stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1369: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +stdout: +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check +./types.at:139: ./check +./types.at:139: ./check +./types.at:139: ./check +./types.at:139: $PREPARSER ./test +input: +input: +input: +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -71319,296 +66330,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error: invalid character: '#' -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: +./calc.at:1340: cat stderr input: +./calc.at:1370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS input: -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) - | 1 + 2 * 3 + !- ++ -./calc.at:1398: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1362: cat stderr -./calc.at:1369: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ - | 1 + 2 * 3 + !- ++ -input: -syntax error: invalid character: '#' -./calc.at:1370: $PREPARSER ./calc input -stderr: - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1363: cat stderr -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1375: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1371: cat stderr -./calc.at:1358: cat stderr -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1368: cat stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error, unexpected '+' -input: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1371: $PREPARSER ./calc /dev/null -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1 = 2 = 3 -./calc.at:1363: $PREPARSER ./calc input - | - | +1 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -71618,83 +66354,93 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 + | (1 + #) = 1111 + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1348: $PREPARSER ./calc input +stderr: stderr: +./calc.at:1347: $PREPARSER ./calc input stderr: +./calc.at:1351: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: input: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1369: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 
Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -71702,7 +66448,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -71713,132 +66459,26 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 13 Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): +Reducing stack by rule 18 (line 131): $1 = token '!' 
(1.13: ) $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: -./calc.at:1374: cat stderr - | (1 + #) = 1111 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -input: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: cat stderr -2.1: syntax error, unexpected '+' -./calc.at:1368: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -./calc.at:1367: cat stderr -./calc.at:1369: cat stderr -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1358: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) 
+ (1 2) = 1 -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1370: $PREPARSER ./calc input -input: -stderr: -input: -stderr: - | (- *) + (1 2) = 1 -stderr: -./calc.at:1364: cat stderr -1.6: syntax error: invalid character: '#' -syntax error -error: 2222 != 1 +./calc.at:1367: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -71847,34 +66487,27 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Return for a new token: Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) @@ -71887,92 +66520,91 @@ -> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.15: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -71980,7 +66612,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -71990,7 +66622,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -71999,73 +66630,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !* ++ -stderr: -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -stderr: -syntax error -error: 2222 != 1 -stderr: -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.14: memory exhausted -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -72074,20 +66638,18 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token '-' (1.2: ) Shifting token '-' (1.2: ) Entering state 2 Stack now 0 4 2 -Return for a new token: Reading a token Next token is token '*' (1.4: ) 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): +Reducing stack by rule 15 (line 115): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -72101,75 +66663,68 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) -> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token -Next token is token number (1.12: 2) +Next token is token "number" (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Stack now 0 8 21 4 Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Error: popping token error (1.10-12: ) Stack now 0 8 21 4 Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) -> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -72180,21 +66735,19 @@ Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) @@ -72206,27 +66759,158 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 
Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1367: $PREPARSER ./calc input +syntax error: invalid character: '#' +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stdout: +stdout: +1.2: syntax error: invalid character: '#' +./types.at:139: ./check +./types.at:139: ./check +./calc.at:1337: cat stderr +input: +input: +input: +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1355: cat stderr + | (# + 1) = 1111 +stderr: + | (1 + #) = 1111 +stderr: + | (1 + #) = 1111 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | (- *) + (1 2) = 1 + | (* *) + (*) + (*) +stderr: +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1338: $PREPARSER ./calc input +stderr: +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1363: $PREPARSER ./calc input +stderr: +======== Testing with C++ standard flags: '' +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.6: syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1342: cat stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.2: syntax error: invalid character: '#' +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr + | (1 + #) = 1111 +./calc.at:1351: $PREPARSER ./calc input +stderr: +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +stderr: +stderr: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -72246,7 +66930,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): +Reducing stack by rule 15 (line 115): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -72265,7 +66949,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -72283,25 +66967,25 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 Reading a token -Next token is token number (1.12: 2) +Next token is token "number" (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Stack now 0 8 21 4 Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Error: popping token error (1.10-12: ) Stack now 0 8 21 4 Shifting token error (1.10-12: ) @@ -72312,7 +66996,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing 
stack by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -72321,7 +67005,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -72333,18 +67017,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) @@ -72356,67 +67040,27 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor -input: -stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1371: cat stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1368: cat stderr -stderr: -1.14: memory exhausted -./calc.at:1375: $PREPARSER ./calc /dev/null -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1370: cat stderr -stderr: -stderr: -error: null divisor Starting parse Entering state 0 Stack now 0 @@ -72436,7 +67080,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): +Reducing stack by rule 15 (line 115): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -72455,7 +67099,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -72473,25 +67117,25 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 Reading a token -Next token is token number (1.12: 2) +Next token is token "number" (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Stack now 0 8 21 4 Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Error: popping token error (1.10-12: ) Stack now 0 8 21 4 Shifting token error (1.10-12: ) @@ -72502,7 +67146,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -72511,7 +67155,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -72523,18 +67167,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) @@ -72546,110 +67190,27 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -1.1: syntax error, unexpected end of file -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1374: "$PERL" -pi -e 'use strict; +input: +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -72659,8 +67220,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +./calc.at:1360: cat stderr + | (# + 1) = 1111 + | (#) + (#) = 2222 + | 1 + 2 * 3 + !- ++ +stderr: + | (1 + #) = 1111 +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -72670,26 +67244,76 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1362: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1358: cat stderr -./calc.at:1369: cat stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: $PREPARSER ./calc /dev/null +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +syntax error: invalid character: '#' +./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.2: syntax error: invalid character: '#' +./calc.at:1355: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stdout: +stdout: +1.6: syntax error: invalid character: '#' +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +input: +./calc.at:1334: cat stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1341: cat stderr +./calc.at:1357: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1362: cat stderr + | (#) + (#) = 2222 +./calc.at:1354: $PREPARSER ./calc input +stderr: +stderr: + | 1 + 2 * 3 + !+ ++ +stderr: +stderr: +./calc.at:1360: $PREPARSER ./calc input +stderr: +stderr: stderr: stderr: stderr: -./calc.at:1363: cat stderr - | 1 + 2 * 3 + !* ++ -1.1: syntax error, unexpected end of file -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -72698,160 +67322,80 @@ Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): +Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1360: cat stderr -input: -./calc.at:1367: cat stderr -./calc.at:1374: cat stderr -1.14: memory exhausted -input: -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1358: $PREPARSER ./calc input - | - | +1 -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (# + 1) = 1111 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1368: $PREPARSER ./calc input -input: -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +syntax error: invalid character: '#' stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -72860,205 +67404,93 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.2: syntax error: invalid character: '#' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: - | (* *) + (*) + (*) -./calc.at:1375: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -493. 
calc.at:1367: ./calc.at:1371: cat stderr - ok -./calc.at:1360: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -stderr: -./calc.at:1364: cat stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.14: memory exhausted Starting parse Entering state 0 Stack now 0 @@ -73067,212 +67499,146 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: -Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) -> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 21 4 -Shifting token error (1.10: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1375: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -stderr: Starting parse Entering state 0 Stack now 0 @@ -73282,132 +67648,102 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.2: syntax error: invalid character: '#' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) Stack now 0 8 21 4 -Shifting token error (1.10: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -73424,53 +67760,18 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (!!) + (1 2) = 1 -./calc.at:1371: $PREPARSER ./calc input -stdout: -stderr: -stderr: -./calc.at:1362: cat stderr -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1390: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1363: "$PERL" -pi -e 'use strict; +input: +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1370: cat stderr -input: - -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -73480,200 +67781,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1364: $PREPARSER ./calc input +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1358: $PREPARSER ./calc input +input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr + | 1 2 +./calc.at:1336: cat stderr +./calc.at:1369: $PREPARSER ./calc input stderr: -./calc.at:1368: cat stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1363: cat stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -input: -./calc.at:1369: cat stderr - | (#) + (#) = 2222 -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1358: cat stderr stderr: stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./scanner.at:326: $PREPARSER ./input -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1362: $PREPARSER ./calc input -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1363: $PREPARSER ./calc /dev/null -input: - | (1 + # + 1) = 1111 +======== Testing with C++ standard flags: '' +stderr: +======== Testing with C++ standard flags: '' Starting parse Entering state 0 Stack now 0 @@ -73687,8 +67817,8 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) @@ -73699,7 +67829,7 @@ Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -73721,8 +67851,8 @@ Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) Error: popping token error (1.8: ) Stack now 0 8 21 4 Shifting token error (1.8: ) @@ -73733,7 +67863,7 @@ Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -73742,7 +67872,7 @@ Stack now 0 8 21 30 Reading a token Next 
token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -73754,18 +67884,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): +Reducing stack by rule 6 (line 102): $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) $3 = nterm exp (1.13-16: 2222) @@ -73776,411 +67906,108 @@ Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1368: $PREPARSER ./calc input -stderr: -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -input: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr -stderr: -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1369: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', 
expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) +Stack now 0 8 21 30 22 1 +Reducing 
stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: $PREPARSER ./calc input -stderr: -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr -./calc.at:1360: cat stderr -stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -74202,90 +68029,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = 
nterm exp (1.13-16: 2222) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -74302,349 +68103,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.6: syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1374: cat stderr -stderr: - | 1 2 -input: -463. scanner.at:326: ./calc.at:1390: $PREPARSER ./calc input - ok -stderr: - | 1 + 2 * 3 + !+ ++ -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack 
now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token number (1.37: 2) -Shifting token 
number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: $PREPARSER ./calc input -510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -74658,13 +68116,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -74675,13 +68131,11 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -74692,7 +68146,6 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -74714,13 +68167,11 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 -Return for a new token: Reading a token Next token is token '+' (1.14: ) Shifting token '+' (1.14: ) @@ -74732,242 +68183,155 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: mv calc.y.tmp calc.y - -input: -input: - | (!!) + (1 2) = 1 -./calc.at:1370: cat stderr -stderr: - | (- *) + (1 2) = 1 -1.6: syntax error: invalid character: '#' -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1368: cat stderr -input: -./calc.at:1375: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1374: $PREPARSER ./calc input - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -1.3: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp 
(1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -syntax error -syntax error -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1364: cat stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 input: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -syntax error -stderr: -1.3: syntax error -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1367: cat stderr +./calc.at:1368: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -74977,12 +68341,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1369: cat stderr input: -./calc.at:1362: cat stderr +input: +input: +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: cat stderr +./calc.at:1343: cat stderr + | (1 + #) = 1111 + | (#) + (#) = 2222 +stderr: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1334: $PREPARSER ./calc input +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) Starting parse Entering state 0 Stack now 0 @@ -74996,11 +68379,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -75011,11 +68396,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -75026,6 +68413,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -75047,38 +68435,34 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (1 + 1) / (1 - 1) -./calc.at:1368: $PREPARSER ./calc input -input: +======== Testing with C++ standard flags: '' | (1 + #) = 1111 -./calc.at:1370: $PREPARSER ./calc input +./calc.at:1341: $PREPARSER ./calc input stderr: +stdout: +./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' input: -stderr: - | 1 + 2 * 3 + !- ++ -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 - | (1 + #) = 1111 -./calc.at:1358: $PREPARSER ./calc input -1.11-17: error: null divisor -./calc.at:1390: "$PERL" -pi -e 'use strict; +input: +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -75088,13 +68472,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -75104,8 +68482,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1348: cat stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1363: cat stderr + | (1 + #) = 1111 + | (# + 1) = 1111 +./calc.at:1336: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +./calc.at:1350: cat stderr +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1344: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: Starting parse Entering state 0 @@ -75120,13 +68514,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -75137,13 +68529,11 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -75154,7 +68544,6 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -75176,28 +68565,22 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' 
(1.13: ) Entering state 5 Stack now 0 8 21 5 -Return for a new token: Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -input: - | (!!) + (1 2) = 1 -./calc.at:1362: $PREPARSER ./calc input -stdout: Starting parse Entering state 0 Stack now 0 @@ -75207,127 +68590,217 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: popping nterm exp (1.1: 1) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = 
nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm 
exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (# + 1) = 1111 -./calc.at:1371: cat stderr -input: -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1360: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./calc.at:1369: $PREPARSER ./calc input -stderr: -stderr: -stderr: -stderr: -stderr: -stderr: -./calc.at:1390: cat stderr -1.2: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -./calc.at:1375: cat stderr Starting parse Entering state 0 Stack now 0 @@ -75341,13 +68814,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -75358,13 +68829,11 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -75375,7 +68844,6 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -75397,21 +68865,19 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 -Return for a new token: Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) @@ -75424,111 +68890,92 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' 
(1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number 
(1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 @@ -75555,8 +69002,18 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1374: cat stderr stderr: +./calc.at:1370: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + Starting parse Entering state 0 Stack now 0 @@ -75570,11 +69027,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -75585,11 +69044,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -75600,6 +69061,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -75621,136 +69083,82 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1368: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1 + 2 * 3 + !+ ++ +stderr: | (* *) + (*) + (*) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc input ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure ... -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1371: $PREPARSER ./calc input +syntax error: invalid character: '#' stderr: +1.2: syntax error: invalid character: '#' +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +syntax error: invalid character: '#' +stdout: input: +./types.at:139: ./check + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1338: cat stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -75760,25 +69168,49 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' -415. types.at:139: ./calc.at:1401: mv calc.y.tmp calc.y - - ok +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 +./calc.at:1347: cat stderr +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +stderr: | 1 + 2 * 3 + !+ ++ -./calc.at:1374: $PREPARSER ./calc input + | (1 + # + 1) = 1111 + | (# + 1) = 1111 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1348: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1343: $PREPARSER ./calc input stderr: -./calc.at:1390: $PREPARSER ./calc input stderr: - | (- *) + (1 2) = 1 stderr: -./calc.at:1370: cat stderr -./calc.at:1375: $PREPARSER ./calc input stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1363: cat stderr Starting parse Entering state 0 Stack now 0 @@ -75788,111 +69220,92 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 
Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 @@ -75928,334 +69341,255 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next 
token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.7-18: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 21 4 -Shifting token error (1.23-27: ) +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1353: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 +Stack now 0 4 12 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error -494. calc.at:1368: ok -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) Starting parse Entering state 0 Stack now 0 @@ -76326,32 +69660,23 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1364: cat stderr -./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1369: cat stderr - +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1362: "$PERL" -pi -e 'use strict; +input: +./calc.at:1340: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -76361,12 +69686,55 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (1 + # + 1) = 1111 +./calc.at:1369: cat stderr +stderr: +./calc.at:1351: cat stderr +stderr: +./calc.at:1355: cat stderr +stderr: +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +1.6: syntax error: invalid character: '#' | (# + 1) = 1111 -./calc.at:1370: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error -./calc.at:1360: "$PERL" -pi -e 'use strict; +stderr: +stderr: +./calc.at:1347: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.6: syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1350: $PREPARSER ./calc input +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check +input: +input: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -76377,308 +69745,258 @@ }eg ' expout || exit 77 input: -./calc.at:1400: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +input: +./calc.at:1337: cat stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1342: cat stderr +./calc.at:1358: cat stderr +./calc.at:1354: cat stderr +input: +input: + | (# + 1) = 1111 + | (1 + #) = 1111 + | (# + 1) = 1111 + | 1 + 2 * 3 + !- ++ + | (* *) + (*) + (*) +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1340: $PREPARSER ./calc input +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | 1 + 2 * 3 + !- ++ + | 1//2 +stderr: +stderr: +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1362: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1353: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.23-27: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.44: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -76695,11 +70013,14 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.6: syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +stdout: +./types.at:139: ./check input: -input: -./calc.at:1362: cat stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -76709,38 +70030,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1370: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: - | (# + 1) = 1111 - | 1 + 2 * 3 + !* ++ -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.2: 
syntax error: invalid character: '#' - | (1 + # + 1) = 1111 -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1369: $PREPARSER ./calc input - -input: stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1360: cat stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr stderr: stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -76750,29 +70052,186 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +1.2: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 @@ -76782,7 +70241,7 @@ Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -76795,18 +70254,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): +Reducing stack by rule 6 (line 102): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) @@ -76817,36 +70276,121 @@ Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +1.2: syntax error: invalid character: '#' +stdout: +1.6: syntax error: invalid character: '#' +stdout: +stderr: +stderr: +./types.at:139: ./check +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' 
(1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./types.at:139: ./check Starting parse Entering state 0 Stack now 0 @@ -76860,13 +70404,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -76877,13 +70419,11 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -76894,7 +70434,6 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -76916,40 +70455,82 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 -Return for a new token: Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1371: cat stderr input: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +input: +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: cat stderr + | (1 + #) = 1111 stderr: - | (- *) + (1 2) = 1 -stderr: -1.2: syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1363: cat stderr + | (# + 1) = 1111 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1354: $PREPARSER ./calc input + | (#) + (#) = 2222 + | (1 + # + 1) = 1111 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input stderr: -./calc.at:1362: $PREPARSER ./calc input +stderr: +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) Starting parse Entering state 0 Stack now 0 @@ -76963,13 +70544,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -76980,13 +70559,11 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -76997,7 +70574,6 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -77019,27 +70595,52 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 -Return for a new token: Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +1.2: syntax error: invalid character: '#' +./calc.at:1368: cat stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1346: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -77049,76 +70650,95 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = 
nterm exp (1.11-14: 1111) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -77135,16 +70755,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: $PREPARSER ./calc input -input: -stderr: - | (* *) + (*) + (*) -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1390: cat stderr -1.6: syntax error: invalid character: '#' -stderr: - | 1 + 2 * 3 + !+ ++ -stderr: Starting parse Entering state 0 Stack now 0 @@ -77154,26 +70764,22 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 @@ -77183,7 +70789,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -77201,218 +70807,186 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: cat stderr -input: -512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose ... Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 15 Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted +Reducing stack by rule 19 (line 124): + $1 = token '!' 
(1.1: ) + $2 = token '*' (1.1: ) +memory exhausted Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1402: mv calc.y.tmp calc.y - -./calc.at:1371: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1363: $PREPARSER ./calc input -stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -input: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) Starting parse Entering state 0 Stack now 0 @@ -77421,112 +70995,102 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: +1.2: syntax error: invalid character: '#' Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: +1.8: syntax error: invalid character: '#' Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 
1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 @@ -77534,33 +71098,248 @@ Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1369: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting 
token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token number (1.1: 1) Shifting token number (1.1: 1) Entering state 1 @@ -77627,183 +71406,21 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1390: $PREPARSER ./calc input -513. calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose ... -./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1358: cat stderr -stderr: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1370: $PREPARSER ./calc input -stderr: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -stderr: -stderr: -./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.1: syntax error -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -77813,22 +71430,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1403: mv calc.y.tmp calc.y - -./calc.at:1401: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.6: syntax error: invalid character: '#' +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: cat stderr +input: +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -77838,271 +71465,79 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 +Stack now 0 4 12 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1364: cat stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -input: -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: - | (1 + 1) / (1 - 1) -./calc.at:1369: $PREPARSER ./calc input -stderr: -stderr: - | 1 + 2 * 3 + !- ++ -1.11-17: error: null divisor -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1374: cat stderr -stderr: 1.6: syntax error: invalid character: '#' -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' -Reading a token -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -1.8: syntax error: invalid character: '#' -Reading a token -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading 
a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -78111,7 +71546,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -78120,30 +71554,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: cat stderr -stderr: -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -78153,19 +71564,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1362: cat stderr -input: input: -stderr: - | (#) + (#) = 2222 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1363: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) + | (1 + #) = 1111 Starting parse Entering state 0 Stack now 0 @@ -78226,7 +71628,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -78280,20 +71682,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1374: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -78425,577 +71813,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: cat stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (* *) + (*) + (*) -stderr: -./calc.at:1362: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -stderr: -stderr: -input: -memory exhausted -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding 
token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: - | (- *) + (1 2) = 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1390: cat stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted - | (1 + 1) / (1 - 1) -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1369: cat stderr -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1358: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !* ++ +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -79005,459 +71825,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr -stderr: -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -1.11-17: error: null divisor -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack 
now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: cat stderr -./calc.at:1358: cat stderr - | 1 = 2 = 3 -./calc.at:1402: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1390: $PREPARSER ./calc input -495. calc.at:1369: ok -stderr: -1.11-17: error: null divisor - | 1 + 2 * 3 + !* ++ -./calc.at:1371: $PREPARSER ./calc input -input: stderr: - | 1 + 2 * 3 + !- ++ -1.7: syntax error -input: -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1334: cat stderr +./calc.at:1360: cat stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -79467,11 +71839,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1362: "$PERL" -pi -e 'use strict; +./calc.at:1367: cat stderr +input: +./calc.at:1336: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -79481,12 +71852,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1370: cat stderr -stderr: -stderr: -stderr: -input: -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1341: cat stderr +./calc.at:1364: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -79496,12 +71864,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.14: memory exhausted +input: +./calc.at:1344: cat stderr +./calc.at:1370: cat stderr + | (1 + # + 1) = 1111 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1346: $PREPARSER ./calc input stderr: -1.7: syntax error | (1 + 1) / (1 - 1) -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1403: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stderr: +./calc.at:1348: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -79510,7 +71883,6 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -79521,15 +71893,13 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' Reading a token +1.6: syntax error: invalid character: '#' Error: popping token '+' (1.4: ) Stack now 0 4 12 Error: popping nterm exp (1.2: 1) @@ -79544,7 +71914,6 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.7: ) 
Shifting token ')' (1.7: ) @@ -79557,13 +71926,11 @@ -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -79574,7 +71941,6 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -79599,7 +71965,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -79608,9 +71973,21 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +1.14: memory exhausted +./calc.at:1362: cat stderr +./calc.at:1369: cat stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -79620,157 +71997,150 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1367: $PREPARSER ./calc input +input: +./calc.at:1350: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: + | (# + 1) = 1111 +./calc.at:1334: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1360: $PREPARSER ./calc input +memory exhausted +1.6: syntax error: invalid character: '#' +./calc.at:1338: cat stderr +./calc.at:1347: cat stderr +./calc.at:1343: cat stderr +stderr: +1.11-17: error: null divisor +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1363: cat stderr +./calc.at:1354: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr +stderr: +stderr: +stderr: +1.14: memory exhausted stderr: -./calc.at:1360: cat stderr - Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 23 4 12 20 1 
-Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -496. calc.at:1370: ok -stderr: -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -./calc.at:1363: cat stderr +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) 1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 @@ -79779,7 +72149,6 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -79790,15 +72159,13 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' Reading a token +1.6: syntax error: invalid character: '#' Error: popping token '+' (1.4: ) Stack now 0 4 12 Error: popping nterm exp (1.2: 1) @@ -79813,7 +72180,6 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -79826,13 +72192,11 @@ -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -79843,7 +72207,6 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -79868,7 +72231,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -79877,18 +72239,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -79898,287 +72248,107 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 
-Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr - | (1 + #) = 1111 -./calc.at:1362: cat stderr -./calc.at:1360: $PREPARSER ./calc input -input: -input: -stderr: - | (#) + (#) = 2222 -stderr: - | (* *) + (*) + (*) -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: 
) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1390: cat stderr - -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1374: $PREPARSER ./calc input +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) input: input: - | (#) + (#) = 2222 -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1371: $PREPARSER ./calc input input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr -stderr: -./calc.at:1364: cat stderr -stderr: -./calc.at:1375: "$PERL" -pi -e 'use strict; +input: +input: +input: +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +input: +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error + | 1//2 +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -80188,8 +72358,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: + | 1 + 2 * 3 + !* ++ + | (1 + # + 1) = 1111 +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1370: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1336: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1347: $PREPARSER ./calc input ./calc.at:1362: $PREPARSER ./calc input +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -80199,264 +72379,116 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) +syntax error: invalid character: '#' +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp 
(1.11-14: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | - | +1 -./calc.at:1390: $PREPARSER ./calc input +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1368: cat stderr + | (# + 1) = 1111 + | (1 + # + 1) = 1111 +./calc.at:1344: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1364: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1341: $PREPARSER ./calc input stderr: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1351: cat stderr +memory exhausted +stderr: +stderr: +stderr: +stderr: +stdout: +1.11-17: error: null divisor +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: -514. calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... -./calc.at:1405: mv calc.y.tmp calc.y - -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' syntax error: invalid character: '#' -2.1: syntax error stderr: -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -80527,176 +72559,69 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +./types.at:139: $PREPARSER ./test +1.11-17: error: null divisor input: -492. calc.at:1364: ok +input: +input: +input: +input: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 + | 1 + 2 * 3 + !+ ++ + | (# + 1) = 1111 +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1358: cat stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1375: cat stderr stderr: +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: cat stderr +syntax error: invalid character: '#' | (# + 1) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1354: $PREPARSER ./calc input +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' + | (1 + # + 1) = 1111 +stderr: +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -80706,132 +72631,94 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1358: $PREPARSER ./calc input -2.1: syntax error -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1371: cat stderr -./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +1.11-17: error: null divisor +stderr: +stderr: +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' -Reading a token -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (1.15-2.0: ) 
-Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: cat stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !* ++ -515. calc.at:1407: testing Calculator %glr-parser %debug ... -./calc.at:1375: $PREPARSER ./calc input +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 @@ -80902,55 +72789,109 @@ Entering state 5 Stack now 0 8 21 5 Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: cat stderr +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token Next token is token '+' (1.14: ) Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Reducing stack by rule 17 (line 117): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1407: mv calc.y.tmp calc.y - -./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - | (1 + #) = 1111 - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: cat stderr -stderr: - - | (# + 1) = 1111 -input: -./calc.at:1371: $PREPARSER ./calc input -1.14: memory exhausted -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1362: $PREPARSER ./calc input +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -80959,9 +72900,8 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' Reading a token +1.2: syntax error: invalid character: '#' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -80972,7 +72912,6 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) @@ -80981,7 +72920,6 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token number (1.6: 1) Error: discarding token number (1.6: 1) @@ -80990,26 +72928,23 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -81020,7 +72955,6 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -81045,7 +72979,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -81054,12 +72987,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.6: syntax error: invalid character: '#' -stderr: -stderr: -stderr: -1.14: memory exhausted +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -81101,7 +73029,7 @@ Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -81155,17 +73083,82 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +input: +input: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: cat stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 + | (1 + # + 1) = 1111 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +======== Testing with C++ standard flags: '' +stderr: +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +input: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1342: cat stderr +./calc.at:1367: cat stderr +./calc.at:1334: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1357: cat stderr +stderr: +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -81175,12 +73168,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -81190,18 +73183,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm 
exp (1.9: 3) @@ -81209,7 +73202,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -81226,35 +73219,17 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - | 1 + 2 * 3 + !+ ++ -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -./calc.at:1390: cat stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -81264,16 +73239,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | (#) + (#) = 2222 +./calc.at:1367: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -81283,12 +73261,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -81298,18 +73276,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -81317,7 +73295,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' 
(1.3: ) $3 = nterm exp (1.5-9: 6) @@ -81334,17 +73312,20 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: +./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1363: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -81353,34 +73334,41 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' +Reading a token +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -81393,11 +73381,13 @@ -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -81408,6 +73398,7 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -81432,6 +73423,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -81440,17 +73432,26 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (#) + (#) = 2222 +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1348: cat stderr +stderr: +./calc.at:1360: cat stderr stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -81460,12 +73461,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -81475,18 +73476,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -81494,7 +73495,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -81511,247 +73512,305 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -' expout || exit 77 +stderr: +stderr: +stderr: +stderr: 1.6: syntax error: invalid character: '#' -./calc.at:1390: $PREPARSER ./calc /dev/null -./calc.at:1358: cat stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -input: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1375: cat stderr -1.1: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr - | (1 + #) = 1111 -./calc.at:1374: $PREPARSER ./calc input -stderr: -./calc.at:1362: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 
+Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +input: +input: +input: +input: +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -syntax error: invalid character: '#' -input: -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error - | 1 + 2 * 3 + !* ++ + | (1 + 1) / (1 - 1) + | (1 + # + 1) = 1111 | (1 + # + 1) = 1111 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1405: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -input: -./calc.at:1358: $PREPARSER ./calc input | 1 + 2 * 3 + !- ++ +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1334: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: stderr: stderr: -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 4 12 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) 
+Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -syntax error: invalid character: '#' -./calc.at:1363: $PREPARSER ./calc input -stderr: -./calc.at:1360: cat stderr -stderr: -./calc.at:1407: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS - | (# + 1) = 1111 -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) Starting parse Entering state 0 Stack now 0 @@ -81832,8 +73891,6 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -81842,105 +73899,77 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 
1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -81949,7 +73978,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -81958,33 +73986,12 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -stdout: -1.2: syntax error: invalid character: '#' - | (#) + (#) = 2222 -stderr: -516. calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.6: syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: -stdout: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -81993,105 +74000,77 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' -Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a 
token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -82100,7 +74079,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -82109,22 +74087,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -input: -./calc.at:1393: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. -./calc.at:1390: cat stderr - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - Starting parse Entering state 0 Stack now 0 @@ -82205,116 +74167,7 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -1.2: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - - | (1 + # + 1) = 1111 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1408: mv calc.y.tmp calc.y - stderr: -./calc.at:1371: cat stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: Starting parse Entering state 0 @@ -82324,6 +74177,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -82334,13 +74188,15 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Reading a token +Return for a new token: 1.6: syntax error: invalid character: '#' +Reading a token Error: popping token '+' (1.4: ) Stack now 0 4 12 Error: popping nterm exp (1.2: 1) @@ -82355,65 +74211,53 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token 
'\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -82422,6 +74266,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -82430,93 +74275,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -input: -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1374: cat stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1363: cat stderr -./calc.at:1393: $PREPARSER ./calc input -stderr: - | (1 + # + 1) = 1111 -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1371: $PREPARSER ./calc input -stderr: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -input: -./calc.at:1358: cat stderr -stderr: -input: -stderr: -stderr: Starting parse Entering state 0 Stack now 0 @@ -82526,291 +74284,160 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +syntax error: invalid character: '#' +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp 
(1.1: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' - | (# + 1) = 1111 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1362: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1363: $PREPARSER ./calc input -input: -stderr: -stderr: -stderr: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1389: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -input: -./calc.at:1375: cat stderr -stderr: -input: -stderr: -stderr: -syntax error - | 1 2 - | (#) + (#) = 2222 -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: $PREPARSER ./calc input -stderr: - | (1 + 1) / (1 - 1) -stdout: -stderr: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1387: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. 
- || /\s$/ - # No tabs. - || /\t/ - )' calc.c +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +481. calc.at:1348: ok -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -stderr: -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; +497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full ... +./calc.at:1371: mv calc.y.tmp calc.y + +./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: +./calc.at:1346: cat stderr +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: cat stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stdout: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1387: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $PREPARSER ./calc input + | (#) + (#) = 2222 stderr: -./calc.at:1392: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c - -syntax error +./calc.at:1360: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -82819,38 +74446,34 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Return for a new token: Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 -Return for a new token: Reading a token Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -82861,57 +74484,51 @@ Shifting token ')' (1.7: ) Entering state 27 Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 125): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) Entering state 23 Stack now 0 8 23 -Return for a new token: Reading a token Next token is token '(' (1.11: ) Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 23 4 -Return for a new token: Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 -Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 20 Stack now 0 8 23 4 12 20 -Return for a new token: Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 -Return for a new token: Reading a token Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 106): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -82922,17 +74539,16 @@ Shifting token ')' (1.17: ) Entering state 27 Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 125): $1 = token '(' (1.11: ) $2 = nterm exp 
(1.12-16: 0) $3 = token ')' (1.17: ) -> $$ = nterm exp (1.11-17: 0) Entering state 32 Stack now 0 8 23 32 -Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): +Reducing stack by rule 10 (line 108): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -82944,18 +74560,130 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1368: cat stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1370: cat stderr +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token 
'\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -82964,8 +74692,123 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) Starting parse Entering state 0 Stack now 0 @@ -83157,25 +75000,126 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +./calc.at:1371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error: invalid character: '#' input: -stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1390: "$PERL" -pi -e 'use strict; +input: +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83185,36 +75129,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1360: cat stderr -syntax error -stdout: input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1397: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83225,14 +75141,42 @@ }eg ' expout || exit 77 input: -stderr: +input: | (1 + 1) / (1 - 1) -./calc.at:1371: $PREPARSER ./calc input +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | (1 + 1) / (1 - 1) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: cat stderr +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1362: cat stderr +./calc.at:1350: cat stderr + | (1 + #) = 1111 +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc input stderr: -./calc.at:1392: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83242,6 +75186,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1369: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -83250,134 +75202,77 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Return for a new token: +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Return for a new token: Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Return for a new token: -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Return for a new token: +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Return for a new token: -Reading a token -Next 
token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Return for a new token: + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -83386,7 +75281,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -83395,24 +75289,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | error +./calc.at:1370: $PREPARSER ./calc input stderr: -./calc.at:1408: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: - | (1 + 1) / (1 - 1) -input: -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83422,32 +75303,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1360: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: $PREPARSER ./calc input -1.11-17: error: null divisor -./calc.at:1374: cat stderr -stderr: -./calc.at:1390: cat stderr -stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83457,9 +75314,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1363: cat stderr -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: Starting parse Entering state 0 Stack now 0 @@ -83469,12 +75323,12 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 @@ -83484,18 +75338,18 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 105): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -83506,7 +75360,7 @@ Shifting token ')' (1.7: ) Entering state 27 Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 125): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -83524,12 +75378,12 @@ Entering state 4 Stack now 0 8 23 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 @@ -83539,18 +75393,18 @@ Entering state 20 Stack now 0 8 23 4 12 20 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token 
"number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 106): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -83561,7 +75415,7 @@ Shifting token ')' (1.17: ) Entering state 27 Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 125): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -83570,7 +75424,7 @@ Stack now 0 8 23 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): +Reducing stack by rule 10 (line 108): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -83582,46 +75436,49 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1389: cat stderr - | 1 2 -./calc.at:1387: $PREPARSER ./calc input +stdout: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./types.at:139: ./check +473. calc.at:1338: ok + +498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union ... +./calc.at:1374: mv calc.y.tmp calc.y + +./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -stderr: + | (1 + # + 1) = 1111 +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1355: cat stderr ./calc.at:1358: cat stderr +./calc.at:1351: cat stderr +stderr: +stderr: +stderr: +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: Starting parse Entering state 0 Stack now 0 @@ -83743,233 +75600,62 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (!!) 
+ (1 2) = 1 - | 1 2 -./calc.at:1392: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -stderr: -./calc.at:1374: $PREPARSER ./calc input -syntax error -input: -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1375: cat stderr -stderr: -input: -stderr: -stderr: -stderr: -1.11-17: error: null divisor -stderr: -stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 
8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (#) + (#) = 2222 -./calc.at:1393: cat stderr -syntax error -./calc.at:1362: "$PERL" -pi -e 'use strict; +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -input: - | 1 2 my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1397: $PREPARSER ./calc input -488. calc.at:1358: ./calc.at:1363: $PREPARSER ./calc input -stderr: -./calc.at:1389: $PREPARSER ./calc input - ok -stdout: -stderr: -syntax error: invalid character: '#' -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +1.6: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1350: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1363: cat stderr +./calc.at:1344: cat stderr +./calc.at:1364: cat stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -stdout: -stderr: -stderr: - | (# + 1) = 1111 +./calc.at:1367: cat stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -syntax error -./calc.at:1371: cat stderr -./calc.at:1375: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./calc.at:1362: cat stderr +input: +./calc.at:1354: cat stderr +./calc.at:1374: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1343: cat stderr + | (1 + # + 1) = 1111 stderr: + | (1 + # + 1) = 1111 stderr: -syntax error: invalid character: '#' -./calc.at:1391: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c - +./calc.at:1355: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1341: $PREPARSER ./calc input stderr: - | 1//2 -syntax error -1.3: syntax error -syntax error +./calc.at:1351: $PREPARSER ./calc input stderr: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -83983,8 +75669,8 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) @@ -83995,7 +75681,7 @@ Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -84017,8 +75703,8 @@ Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) Error: popping token error (1.8: ) Stack now 0 8 21 4 Shifting token error (1.8: ) @@ -84029,7 +75715,7 @@ Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -84038,7 +75724,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -84050,18 +75736,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): +Reducing stack by rule 6 (line 102): $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) $3 = nterm exp (1.13-16: 2222) @@ -84072,31 +75758,33 @@ Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.11-17: error: null divisor +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.11-17: error: null divisor +1.6: syntax error: invalid character: '#' stderr: -497. calc.at:1371: ok -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -1.2: syntax error: invalid character: '#' -./calc.at:1360: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -84106,12 +75794,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -84121,30 +75804,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -1.3: syntax error - | (1 + #) = 1111 -./calc.at:1362: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1391: $PREPARSER ./calc input -syntax error -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -84154,10 +75814,165 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stdout: +syntax error: invalid character: '#' +1.11-17: error: null divisor +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 
11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.11-17: error: null divisor +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +input: +input: +input: +input: + | (1 + 1) / (1 - 1) + | (# + 1) = 1111 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1353: cat stderr +./types.at:139: $PREPARSER ./test +./calc.at:1340: cat stderr +input: +./calc.at:1344: $PREPARSER ./calc input +input: +./calc.at:1357: cat stderr +stderr: +1.11-17: error: null divisor + | 1 + 2 * 3 + !* ++ +./calc.at:1363: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1367: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1343: $PREPARSER ./calc input +stderr: +stderr: +syntax error: invalid character: '#' +stderr: Starting parse Entering state 0 Stack now 0 @@ -84171,8 +75986,8 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) @@ -84183,7 +75998,7 @@ Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -84205,8 +76020,8 @@ Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) Error: popping token error (1.8: ) Stack now 0 8 21 4 Shifting token error (1.8: ) @@ -84217,7 +76032,7 @@ Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -84226,7 +76041,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -84238,18 +76053,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): +Reducing stack by rule 6 (line 102): $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) $3 = nterm exp (1.13-16: 2222) @@ -84260,25 +76075,39 @@ Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm 
exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1354: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +stderr: +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -84319,48 +76148,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -84377,44 +76222,175 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.2: syntax error: invalid character: '#' -syntax error -./calc.at:1387: "$PERL" -pi -e 'use 
strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: cat stderr -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +error: null divisor +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.11-17: error: null divisor +./calc.at:1368: cat stderr +./calc.at:1370: cat stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -84432,37 +76408,38 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: +1.2: syntax error: invalid character: '#' Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Return for a new token: +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -84475,11 +76452,13 @@ -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -84490,6 +76469,7 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -84514,6 +76494,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -84522,119 +76503,31 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -stderr: -./calc.at:1374: cat stderr -./calc.at:1390: cat stderr -./calc.at:1392: cat stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -489. 
calc.at:1360: ok - | 1 2 -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -input: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + 1) / (1 - 1) -./calc.at:1374: $PREPARSER ./calc input -stderr: -./calc.at:1387: cat stderr -./calc.at:1375: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: -1.3: syntax error -stderr: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: cat stderr - | 1//2 -./calc.at:1392: $PREPARSER ./calc input -stderr: -./calc.at:1390: $PREPARSER ./calc input input: -1.3: syntax error +./calc.at:1342: cat stderr +./calc.at:1360: cat stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: - | (1 + # + 1) = 1111 -./calc.at:1362: cat stderr -syntax error -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 stderr: -./calc.at:1387: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' input: -stderr: -error: null divisor -stderr: -./calc.at:1389: cat stderr -./calc.at:1397: cat stderr -stderr: -517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose ... -1.6: syntax error: invalid character: '#' -./calc.at:1393: cat stderr - -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -syntax error my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -stderr: -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1409: mv calc.y.tmp calc.y - -stderr: -input: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +./calc.at:1337: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1340: $PREPARSER ./calc input +error: null divisor Starting parse Entering state 0 Stack now 0 @@ -84644,12 +76537,12 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 @@ -84667,121 +76560,96 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): 
- $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error -input: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: -input: - | (# + 1) = 1111 -./calc.at:1362: $PREPARSER ./calc input - | error -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -stderr: -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1393: $PREPARSER ./calc input -stderr: -stderr: + | (1 + #) = 1111 +./calc.at:1353: $PREPARSER ./calc input + | (1 + # + 1) = 1111 stderr: -./calc.at:1391: cat stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... 
-1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +./calc.at:1357: $PREPARSER ./calc input stderr: stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -84791,95 +76659,147 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> 
$$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +stdout: +stderr: +stdout: +./calc.at:1371: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +error: null divisor +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +./calc.at:1369: cat stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 + | (# + 1) = 1111 + | (1 + 1) / (1 - 1) +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1370: $PREPARSER ./calc input +stderr: +./calc.at:1342: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +stderr: +./calc.at:1371: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +stderr: stderr: +stderr: +stderr: +stderr: +error: null divisor Starting parse Entering state 0 Stack now 0 @@ -84889,12 +76809,12 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 @@ -84912,156 +76832,169 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm 
input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1411: mv calc.y.tmp calc.y - -stdout: -stderr: -syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -syntax error -stderr: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr -input: -./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -1.3: syntax error -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1394: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - - | 1//2 -stderr: -stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1392: cat stderr -syntax error -syntax error -stderr: -stderr: -input: -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -input: -1.3: syntax error -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 123 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 @@ -85070,8 +77003,9 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Reading a token +Return for a new token: 1.2: syntax error: invalid character: '#' +Reading a token Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -85082,6 +77016,7 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) @@ -85090,6 +77025,7 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token number (1.6: 1) Error: discarding token number (1.6: 1) @@ -85098,6 +77034,7 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -85110,11 +77047,13 @@ -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -85125,6 +77064,7 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -85149,6 +77089,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -85157,47 +77098,92 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1374: $PREPARSER ./calc --num input -input: -stderr: -./calc.at:1363: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) 
+Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +472. calc.at:1337: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok input: -stderr: - | error -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1394: $PREPARSER ./calc input -1.3: syntax error -stderr: -stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1390: cat stderr -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85208,11 +77194,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-stderr: -stderr: -syntax error -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85222,16 +77204,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr -1.11-17: error: null divisor -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -input: - | (* *) + (*) + (*) -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1397: cat stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85241,24 +77214,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: mv calc.y.tmp calc.y - -stderr: -input: -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1362: cat stderr - | 1 2 -./calc.at:1394: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1347: cat stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85268,19 +77227,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -stderr: -stderr: -stderr: - | 1 + 2 * 3 -./calc.at:1374: $PREPARSER ./calc --num input -./calc.at:1391: cat stderr -input: -input: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85290,139 +77237,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -syntax error -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 
-Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -input: - | error -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1393: cat stderr - | error -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1375: cat stderr -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1391: $PREPARSER ./calc input -stderr: -stderr: -stderr: -1.1: syntax error -syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1334: $PREPARSER ./calc input +./calc.at:1350: cat stderr + stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -85484,7 +77304,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -85538,6 +77358,17 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' + | (1 + #) = 1111 +./calc.at:1360: $PREPARSER ./calc input +error: null divisor +stderr: +stderr: +stderr: +======== Testing with C++ standard flags: '' +stdout: +error: null divisor +stderr: Starting parse Entering state 0 Stack now 0 @@ -85547,162 +77378,266 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a 
token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error -stderr: -./calc.at:1409: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -input: -stderr: -1.1: syntax error +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) stderr: -syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor input: - | 1 = 2 = 3 -1.1: syntax error -./calc.at:1389: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: $PREPARSER ./calc input - | 123 -./calc.at:1375: $PREPARSER ./calc --num input -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr stderr: input: -syntax error -./calc.at:1390: cat stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | + | +1 +./calc.at:1369: $PREPARSER ./calc input stderr: - | 1 = 2 = 3 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: $PREPARSER ./calc input -1.1: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 
+Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) Starting parse Entering state 0 Stack now 0 @@ -85743,64 +77678,48 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = 
nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -85817,84 +77736,23 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -stderr: -' expout || exit 77 -./calc.at:1363: cat stderr -input: -stderr: -syntax error - | 1 + 2 * 3 + !+ ++ -./calc.at:1392: cat stderr -./calc.at:1390: $PREPARSER ./calc input -syntax error -stderr: -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1397: cat stderr -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1394: cat stderr -input: stderr: -input: -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1391: cat stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: stderr: - | 1 = 2 = 3 -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1362: cat stderr +error: null divisor +1.2: syntax error: invalid character: '#' +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + Starting parse Entering state 0 Stack now 0 @@ -85904,12 +77762,12 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 @@ -85927,8 +77785,8 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) @@ -85943,8 +77801,8 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) Error: popping token error (1.2-8: ) Stack now 0 4 Shifting token error (1.2-10: ) @@ -85955,7 +77813,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -85968,18 +77826,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) -> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): +Reducing stack by rule 6 (line 102): $1 = nterm exp (1.1-11: 1111) $2 = token '=' (1.13: ) $3 = nterm exp (1.15-18: 1111) @@ -85990,60 +77848,116 @@ Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-18: 1111) $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1374: cat stderr -syntax error -./calc.at:1413: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stderr: +482. calc.at:1350: 480. calc.at:1347: ok + ok +./calc.at:1362: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed ... 
+./calc.at:1375: mv calc.y.tmp calc.y + +./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + + +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1346: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1351: cat stderr +500. calc.at:1387: testing Calculator %glr-parser ... +501. calc.at:1389: testing Calculator %glr-parser %header ... + | (1 + 1) / (1 - 1) stderr: - | 1 = 2 = 3 -./calc.at:1392: $PREPARSER ./calc input -1.7: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1336: cat stderr stderr: input: - | 1 + 2 * 3 -input: -input: -./calc.at:1375: $PREPARSER ./calc --num input - | 1//2 - | 1 = 2 = 3 -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1387: cat stderr -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1390: $PREPARSER ./calc input -stderr: +./calc.at:1387: mv calc.y.tmp calc.y + +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: mv calc.y.tmp calc.y + stderr: -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1393: cat stderr stderr: -input: -1.7: syntax error stderr: -1.3: syntax error, unexpected '+', expecting end of file +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1374: $PREPARSER ./calc input stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -86053,12 +77967,12 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting 
token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 @@ -86076,120 +77990,79 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error - | 1 + 2 * 3 -1.7: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1374: $PREPARSER ./calc --exp input -stderr: -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -stderr: -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./types.at:139: $PREPARSER ./test +stdout: stderr: - | 1 = 2 = 3 - | - | +1 -./calc.at:1393: $PREPARSER ./calc input stderr: +./types.at:139: $PREPARSER ./test Starting parse Entering state 0 Stack now 0 @@ -86199,149 +78072,153 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) Entering state 12 Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 27 Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 118): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) +Next token is token '/' (1.1: ) 
+Shifting token '/' (1.1: ) Entering state 23 Stack now 0 8 23 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 8 23 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 Stack now 0 8 23 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) Entering state 20 Stack now 0 8 23 4 12 20 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) Entering state 12 Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 27 Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) Entering state 32 Stack now 0 8 23 32 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1387: $PREPARSER ./calc input -stderr: -1.3: syntax error, unexpected '+', expecting end of file -stderr: -1.7: syntax error +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr +483. calc.at:1351: ./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + ok +479. calc.at:1346: ok stderr: -syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: + + +./types.at:139: ./check stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: $PREPARSER ./test +input: +input: +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86351,11 +78228,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1367: cat stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86365,8 +78239,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1341: cat stderr +./calc.at:1363: cat stderr +./calc.at:1364: cat stderr +./calc.at:1343: cat stderr +./calc.at:1358: cat stderr + | (1 + 1) / (1 - 1) + | (1 + #) = 1111 +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86376,8 +78256,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -syntax error +stderr: +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1375: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -86413,7 +78323,7 @@ Stack now 0 4 12 21 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -86424,7 +78334,7 @@ Shifting token ')' (1.7: ) Entering state 27 Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -86468,7 +78378,7 @@ Stack now 0 8 23 4 12 20 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -86479,7 +78389,7 @@ Shifting token ')' (1.17: ) Entering state 27 Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -86488,7 +78398,7 @@ Stack now 0 8 23 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): +Reducing stack by rule 10 (line 121): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -86519,34 +78429,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1363: cat stderr -stderr: -syntax error -stderr: -./calc.at:1375: cat stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86556,26 +78439,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +477. calc.at:1343: ok +503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span} ... +./calc.at:1391: mv calc.y.tmp calc.y + +502. calc.at:1390: testing Calculator %glr-parser %locations ... +./calc.at:1390: mv calc.y.tmp calc.y + +./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + +504. 
calc.at:1392: testing Calculator %glr-parser %name-prefix "calc" ... +./calc.at:1392: mv calc.y.tmp calc.y + +./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -./calc.at:1391: cat stderr -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1370: cat stderr input: - | - | +1 -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86585,47 +78467,568 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1397: cat stderr input: -stderr: - | 1 + 2 * 3 -498. calc.at:1374: ok -Starting parse -Entering state 0 -Stack now 0 +input: +./calc.at:1368: cat stderr +input: +./calc.at:1389: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1354: cat stderr +./calc.at:1387: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: + | 1 2 + | (1 + # + 1) = 1111 + | (# + 1) = 1111 + | (#) + (#) = 2222 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1367: $PREPARSER ./calc input + | (#) + (#) = 2222 +stderr: +./calc.at:1364: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +stderr: +======== Testing with C++ standard flags: '' +./calc.at:1341: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) 
+Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +error: null divisor +======== Testing with C++ standard flags: '' +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1344: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1353: cat stderr +./calc.at:1391: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +syntax error: invalid character: '#' +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +./calc.at:1340: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 
+Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +./calc.at:1360: cat stderr +./calc.at:1357: cat stderr +error: null divisor +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stdout: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stdout: +./types.at:139: $PREPARSER ./test +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -86636,7 +79039,7 @@ Shifting token ')' (1.7: ) Entering state 27 Stack now 0 4 12 27 -Reducing stack by rule 13 (line 113): +Reducing stack by rule 13 (line 138): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -86654,12 +79057,12 @@ Entering state 4 Stack now 0 8 23 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 @@ -86669,18 +79072,18 @@ Entering state 20 Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): +Reducing stack by rule 8 (line 119): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -86691,7 +79094,7 @@ Shifting token ')' (1.17: ) Entering state 27 Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): +Reducing stack by rule 13 (line 138): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -86700,7 +79103,7 @@ Stack now 0 8 23 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): +Reducing stack by rule 10 (line 121): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -86712,12 +79115,133 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) 
+Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) @@ -86731,9 +79255,313 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1375: $PREPARSER ./calc --exp input +./types.at:139: $PREPARSER ./test +478. 
calc.at:1344: ok + +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' +Reading a token +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc} ... 
+./calc.at:1393: mv calc.y.tmp calc.y + +./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: ./check +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +./types.at:139: $PREPARSER ./test +input: +input: +input: +input: +input: +./calc.at:1369: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | (# + 1) = 1111 + | (1 + 1) / (1 - 1) + | + | +1 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1353: $PREPARSER ./calc input + | 1 2 +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1354: $PREPARSER ./calc input +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor + | (1 + # + 1) = 1111 +./calc.at:1368: $PREPARSER ./calc input +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: $PREPARSER ./calc /dev/null +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +474. 
calc.at:1340: ok +./calc.at:1342: cat stderr +./calc.at:1334: cat stderr +input: +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86743,15 +79571,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: cat stderr +./calc.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | (1 + 1) / (1 - 1) + | (# + 1) = 1111 +stderr: +stderr: +./calc.at:1357: $PREPARSER ./calc input +stderr: +stderr: +stderr: +======== Testing with C++ standard flags: '' +stderr: +======== Testing with C++ standard flags: '' +stderr: +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' stderr: -./calc.at:1390: cat stderr -input: -./calc.at:1392: cat stderr -input: stderr: Starting parse Entering state 0 @@ -86762,12 +79597,234 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = 
nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +syntax error +./calc.at:1360: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 @@ -86777,18 +79834,18 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 99): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -86799,7 +79856,7 @@ Shifting token ')' (1.7: ) Entering state 27 Stack now 0 4 12 27 -Reducing stack by rule 13 (line 113): +Reducing stack by rule 13 (line 138): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -86817,12 +79874,12 @@ Entering state 4 Stack now 0 8 23 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 @@ -86832,18 +79889,18 @@ Entering state 20 Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): +Reducing stack by rule 8 (line 119): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -86854,7 +79911,7 @@ Shifting token ')' (1.17: ) Entering state 27 Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): +Reducing stack by rule 13 (line 138): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -86863,7 +79920,7 @@ Stack now 0 8 23 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): +Reducing stack by rule 10 (line 121): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -86875,182 +79932,44 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm 
line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | - | +1 - | - | +1 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +476. calc.at:1342: 470. calc.at:1334: ok + ok -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1394: cat stderr -./calc.at:1393: cat stderr -stderr: -stderr: -stderr: -2.1: syntax error -input: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error -stderr: -490. calc.at:1362: ok -syntax error - | - | +1 -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr -input: -input: - | error -stderr: -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1393: $PREPARSER ./calc /dev/null -2.1: syntax error -stderr: -./calc.at:1394: $PREPARSER ./calc input -2.1: syntax error -stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./calc.at:1390: $PREPARSER ./calc input -syntax error -./calc.at:1395: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c -stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -input: -stderr: -1.14: memory exhausted -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -499. calc.at:1375: ok - | - | +1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./calc.at:1363: cat stderr -input: -stderr: -stderr: -syntax error - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1395: $PREPARSER ./calc input -1.14: memory exhausted -syntax error -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1414: mv calc.y.tmp calc.y -491. calc.at:1363: -syntax error - ok -./calc.at:1397: cat stderr +506. calc.at:1394: testing Calculator %glr-parser %verbose ... +./calc.at:1394: mv calc.y.tmp calc.y -stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +507. calc.at:1395: testing Calculator %glr-parser parse.error=verbose ... +./calc.at:1395: mv calc.y.tmp calc.y + +508. calc.at:1397: testing Calculator %glr-parser api.pure %locations ... +./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1397: mv calc.y.tmp calc.y + +./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87059,10 +79978,9 @@ ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg -./calc.at:1397: $PREPARSER ./calc /dev/null ' expout || exit 77 -input: -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87072,8 +79990,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87083,8 +80000,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87094,182 +80011,761 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: cat stderr -stderr: -1.1: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1389: $PREPARSER ./calc /dev/null -./calc.at:1391: $PREPARSER ./calc /dev/null -1.1: syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -syntax error, unexpected number -syntax error -./calc.at:1394: cat stderr -1.1: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: cat stderr -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: cat stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +======== Testing with C++ standard flags: '' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = 
nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: cat stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: -./calc.at:1392: cat stderr - -syntax error -input: -./calc.at:1392: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: stderr: -input: -input: -1.1: syntax error +./calc.at:1367: cat stderr stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -syntax error, unexpected number - | 1 = 2 = 3 -./calc.at:1394: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1393: $PREPARSER ./calc input -syntax error -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: -521. calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... stderr: stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1397: cat stderr -./calc.at:1387: cat stderr -./calc.at:1416: mv calc.y.tmp calc.y - -syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1355: cat stderr +./calc.at:1358: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1364: cat stderr +./calc.at:1371: cat stderr +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) 
+Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = 
token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stdout: stderr: +stdout: +stdout: syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -input: -522. calc.at:1426: testing Calculator lalr1.cc %header ... -syntax error -./calc.at:1387: $PREPARSER ./calc /dev/null -./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: stderr: -./calc.at:1426: mv calc.y.tmp calc.y - -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: cat stderr stderr: +stdout: +./types.at:139: ./check +./types.at:139: ./check +./calc.at:1375: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) 
+Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./types.at:139: ./check stderr: -syntax error -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: -523. calc.at:1431: testing Calculator C++ ... -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1394: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: ./check +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87279,7 +80775,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87289,77 +80785,54 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -./calc.at:1431: mv calc.y.tmp calc.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1375: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1395: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1394: cat stderr -./calc.at:1393: cat stderr -./calc.at:1389: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -input: -./calc.at:1390: cat stderr -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -stderr: -./calc.at:1392: cat stderr -./calc.at:1395: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1397: cat stderr -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 - | - | +1 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1395: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1336: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | (# + 1) = 1111 +./calc.at:1362: $PREPARSER ./calc input +486. calc.at:1355: ok input: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 input: -./calc.at:1394: $PREPARSER ./calc input -stderr: -./calc.at:1414: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | (!!) + (1 2) = 1 - | (1 + #) = 1111 -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) + | (1 + # + 1) = 1111 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y input: -stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; + + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87369,82 +80842,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1392: $PREPARSER ./calc input +./calc.at:1370: cat stderr +./calc.at:1369: cat stderr input: -stderr: -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (!!) 
+ (1 2) = 1 -./calc.at:1397: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -syntax error -stderr: -syntax error -error: 2222 != 1 -stderr: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1426: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1387: cat stderr -stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -1.11: syntax error -1.1-16: error: 2222 != 1 -1.6: syntax error: invalid character: '#' -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87454,42 +80856,116 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr -./calc.at:1389: cat stderr + | 1//2 +./calc.at:1363: cat stderr +./calc.at:1371: $PREPARSER ./calc input + | (1 + #) = 1111 stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1387: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1391: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations ... +./calc.at:1398: mv calc.y.tmp calc.y + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +471. calc.at:1336: ok +./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1368: cat stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87499,207 +80975,568 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: - | error -input: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 - | (!!) 
+ (1 2) = 1 -./calc.at:1394: cat stderr -./calc.at:1395: $PREPARSER ./calc input -input: -stderr: -syntax error, unexpected invalid token - | (!!) + (1 2) = 1 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1394: $PREPARSER ./calc /dev/null -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1397: cat stderr -./calc.at:1393: cat stderr -syntax error -./calc.at:1392: cat stderr -syntax error -error: 2222 != 1 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Return for a new token: +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Return for a new token: +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Return for a new token: +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Return for a new token: +Reading a token +Next token is token 
number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Return for a new token: +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Return for a new token: +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1341: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -syntax error -syntax error, unexpected invalid token -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -error: 2222 != 1 -input: -./calc.at:1390: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1397: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (!!) 
+ (1 2) = 1 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) stderr: -input: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1370: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Return for a new token: +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Return for a new token: +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Return for a new token: +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Return for a new token: +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Return for a new token: +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Return for a new token: +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' 
(1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + stderr: -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1387: cat stderr +stdout: stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -syntax error -error: 2222 != 1 +./types.at:139: ./check +stdout: +./types.at:139: $PREPARSER ./test stderr: +stdout: +./calc.at:1387: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... +./calc.at:1400: mv calc.y.tmp calc.y + stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -syntax error -error: 2222 != 1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: cat stderr +stdout: stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stdout: +./types.at:139: ./check +./types.at:139: ./check stderr: -input: -1.2: syntax error: invalid character: '#' -syntax error -error: 2222 != 1 - | (!!) 
+ (1 2) = 1 -./calc.at:1389: cat stderr -./calc.at:1387: $PREPARSER ./calc input +stdout: +./types.at:139: ./check stderr: -syntax error -error: 2222 != 1 +stdout: +./types.at:139: ./check input: -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1394: cat stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1397: cat stderr -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1391: cat stderr -stderr: input: input: - | (- *) + (1 2) = 1 -./calc.at:1389: $PREPARSER ./calc input -syntax error -error: 2222 != 1 -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87709,147 +81546,306 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1398: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | (1 + #) = 1111 + | (1 + 1) / (1 - 1) | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1394: $PREPARSER ./calc input -syntax error, unexpected '=' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1393: cat stderr -input: -input: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1369: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1357: cat stderr +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '=' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1390: cat stderr - | (* *) + (*) + (*) -./calc.at:1397: $PREPARSER ./calc input -syntax error -syntax error -error: 2222 != 1 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1392: cat stderr -input: - | (* *) + (*) + (*) +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1360: cat stderr stderr: +syntax error: invalid character: '#' +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS input: -./calc.at:1393: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error - | (1 + # + 1) = 1111 -./calc.at:1390: $PREPARSER ./calc input -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 stderr: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +475. calc.at:1341: stderr: stderr: -input: -syntax error -syntax error -syntax error stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token 1.6: syntax error: invalid character: '#' -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -error: 2222 != 1 -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1387: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 + ok stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +487. calc.at:1357: 485. calc.at:1354: ok + ok + + + stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 stderr: -1.6: syntax error: invalid character: '#' -syntax error -syntax error -syntax error -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: +stdout: +./calc.at:1392: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./calc.at:1390: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + stderr: -./calc.at:1395: cat stderr -syntax error -syntax error -error: 2222 != 1 -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1387: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1390: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1391: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure ... 
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87859,8 +81855,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87870,211 +81865,98 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1401: mv calc.y.tmp calc.y + input: - | - | +1 -./calc.at:1395: $PREPARSER ./calc input -input: +512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose ... +513. calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose ... + | (1 + # + 1) = 1111 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: $PREPARSER ./calc input input: -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1402: mv calc.y.tmp calc.y + +./calc.at:1403: mv calc.y.tmp calc.y + +======== Testing with C++ standard flags: '' stderr: -syntax error, unexpected '+' -./calc.at:1390: cat stderr - | (- *) + (1 2) = 1 - | (* *) + (*) + (*) -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1394: cat stderr -./calc.at:1391: cat stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1391: $PREPARSER ./calc input stderr: -syntax error -syntax error -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + 1) / (1 - 1) -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1397: cat stderr -stderr: -stderr: -stderr: -syntax error -syntax error -error: 2222 != 1 -1.11-17: error: null divisor ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '+' -input: -./calc.at:1393: cat stderr -syntax error -syntax error -syntax error - | (* *) + (*) + (*) -./calc.at:1391: $PREPARSER ./calc input -input: +./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: - | 1 + 2 * 3 + !+ ++ - | (!!) 
+ (1 2) = 1 -stderr: -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1394: $PREPARSER ./calc input 1.11-17: error: null divisor -stderr: -stderr: -input: -syntax error -syntax error -error: 2222 != 1 - | 1 + 2 * 3 + !+ ++ -stderr: -syntax error -error: 2222 != 1 -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: cat stderr -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -error: 2222 != 1 -stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -input: -./calc.at:1395: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1397: $PREPARSER ./calc input - | (* *) + (*) + (*) + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1392: $PREPARSER ./calc input -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -./calc.at:1389: cat stderr -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: $PREPARSER ./calc /dev/null - | 1 + 2 * 3 + !- ++ -stderr: -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1390: cat stderr -./calc.at:1393: $PREPARSER ./calc input -syntax error -syntax error -syntax error -stderr: +./calc.at:1353: cat stderr +input: +./calc.at:1367: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1400: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1362: cat stderr +./calc.at:1358: cat stderr stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | 1 2 + | 1//2 stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1374: $PREPARSER ./calc input stderr: -syntax error, unexpected end of input -stdout: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test stderr: -input: -502. 
calc.at:1390: ok -./calc.at:1394: cat stderr stderr: -syntax error, unexpected end of input stderr: -./calc.at:1391: cat stderr - | 1 + 2 * 3 + !+ ++ stderr: -syntax error -syntax error -syntax error -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1371: cat stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88084,24 +81966,397 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: + | 1 2 +./calc.at:1387: $PREPARSER ./calc input +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1394: $PREPARSER ./calc input -input: -stdout: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 + 2 * 3 + !+ ++ + | (1 + # + 1) = 1111 +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error, unexpected number +488. calc.at:1358: ok +./calc.at:1368: cat stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1364: cat stderr +stderr: +stderr: +input: +./calc.at:1369: cat stderr +input: + | (1 + 1) / (1 - 1) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' 
(1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: syntax error +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1367: $PREPARSER ./calc input +stderr: +./calc.at:1362: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + syntax error -error: 2222 != 1 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./calc.at:1398: "$PERL" -ne ' + | error +./calc.at:1371: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stdout: +1.3: syntax error, unexpected number +stdout: +494. calc.at:1368: ./calc.at:1389: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + + ok +./calc.at:1393: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -88112,17 +82367,384 @@ || /\t/ )' calc.c -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +514. calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... +./calc.at:1405: mv calc.y.tmp calc.y + +./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +./calc.at:1403: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1401: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1402: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +input: + | 1 2 + | 1 2 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc input + | 1 2 + | (!!) 
+ (1 2) = 1 stderr: -./calc.at:1387: cat stderr +./calc.at:1369: $PREPARSER ./calc input stderr: -./calc.at:1397: cat stderr -syntax error -syntax error -error: 2222 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +stderr: +./calc.at:1391: $PREPARSER ./calc input +stderr: +./calc.at:1364: $PREPARSER ./calc input +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +error: null divisor +515. calc.at:1407: testing Calculator %glr-parser %debug ... 
+./calc.at:1370: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88132,32 +82754,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: input: - | (* *) + (*) + (*) -./calc.at:1387: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1393: $PREPARSER ./calc input +1.3: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -88171,28 +82792,110 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1398: $PREPARSER ./calc input -input: -input: -stderr: -stderr: - | 1 + 2 * 3 + !* ++ - | 1 + 2 * 3 + !- ++ ./calc.at:1389: $PREPARSER ./calc input -syntax error -syntax error -syntax error -./calc.at:1395: cat stderr -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: $PREPARSER ./calc input -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1393: cat stderr +1.3: syntax error stderr: -1.14: memory exhausted -./calc.at:1394: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' 
(1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1407: mv calc.y.tmp calc.y + +syntax error +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88202,43 +82905,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error syntax error -syntax error -./calc.at:1392: cat stderr -input: -stderr: -stderr: -input: - | 1 2 -stderr: -input: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -1.14: memory exhausted -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1392: $PREPARSER ./calc input -input: -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1393: $PREPARSER ./calc input +./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -1.3: syntax error, unexpected number -./calc.at:1394: cat stderr +stdout: +./calc.at:1394: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + stderr: ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88248,142 +82934,216 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1405: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +input: +./calc.at:1363: cat stderr +stdout: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1397: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + stderr: stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -memory exhausted -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -1.3: syntax error, unexpected number -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (* *) + (*) + (*) stderr: -./calc.at:1394: $PREPARSER ./calc input -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1392: $PREPARSER ./calc input -syntax error -syntax error -syntax error -./calc.at:1389: cat stderr +./calc.at:1370: $PREPARSER ./calc input stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1387: cat stderr -524. calc.at:1432: testing Calculator C++ %locations ... -./calc.at:1432: mv calc.y.tmp calc.y - -./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: cat stderr stderr: -input: -syntax error -syntax error +./calc.at:1375: cat stderr +1.3: syntax error +error: null divisor +1.3: syntax error +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error - | 1 + 2 * 3 + !* ++ +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp 
(1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: cat stderr stderr: -./calc.at:1398: cat stderr -./calc.at:1391: cat stderr -./calc.at:1389: $PREPARSER ./calc input input: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (#) + (#) = 2222 -memory exhausted -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1395: cat stderr +stdout: stderr: -./calc.at:1393: cat stderr -./calc.at:1397: $PREPARSER ./calc input -memory exhausted + | (# + 1) = 1111 stderr: +./calc.at:1363: $PREPARSER ./calc input +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1395: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. +stderr: + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +stdout: +./types.at:139: ./check input: input: - | 1 + 2 * 3 + !+ ++ - | 1 + 2 * 3 + !* ++ -input: -./calc.at:1387: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1391: $PREPARSER ./calc input - | 1//2 -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (!!) 
+ (1 2) = 1 -./calc.at:1392: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1397: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88393,16 +83153,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: $PREPARSER ./calc input +./calc.at:1407: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS input: -stderr: -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (#) + (#) = 2222 -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88412,21 +83165,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.14: memory exhausted -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -error: 2222 != 1 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88436,22 +83177,110 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 2 stderr: stderr: +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -1.14: memory exhausted -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 - | 1 + 2 * 3 + !- ++ -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1392: cat stderr -./calc.at:1394: cat stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1371: cat stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88462,14 +83291,11 @@ }eg ' expout || exit 77 stderr: -./calc.at:1389: cat stderr -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: input: input: - | 1 + 2 * 3 + !* ++ -stdout: ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88480,68 +83306,113 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1403: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1392: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1389: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' 
(1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: cat stderr stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1397: cat stderr -./calc.at:1398: cat stderr -./calc.at:1432: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -memory exhausted +./calc.at:1375: $PREPARSER ./calc input stderr: -input: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' stderr: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: cat stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+stderr: + | 1 2 +./calc.at:1393: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -88555,77 +83426,119 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1395: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1360: $PREPARSER ./calc input +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1393: cat stderr -input: -./calc.at:1391: cat stderr -input: - | 1 + 2 * 3 + !- ++ -stderr: -stderr: - | error -./calc.at:1394: $PREPARSER ./calc input -input: -stderr: -./calc.at:1398: $PREPARSER ./calc input -memory exhausted -syntax error: invalid character: '#' -syntax error: invalid character: '#' - | (1 + #) = 1111 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -input: -./calc.at:1397: $PREPARSER ./calc input -1.1: syntax error, unexpected invalid token - | (#) + (#) = 2222 -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stderr: - | (1 + #) = 1111 -./types.at:139: $PREPARSER ./test -./calc.at:1391: $PREPARSER ./calc input -stderr: - | (- *) + (1 2) = 1 -1.6: syntax error: invalid character: '#' -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1387: cat stderr -stderr: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: input: -./calc.at:1400: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h -stderr: - -./calc.at:1395: $PREPARSER ./calc input - | 1 2 -stderr: -stderr: -stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88635,11 +83548,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: +input: + | 1 = 2 = 3 +./calc.at:1371: $PREPARSER ./calc input + | 1 2 my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" @@ -88647,61 +83559,158 @@ }eg ' expout || exit 77 stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error, unexpected invalid token +stderr: +./calc.at:1397: $PREPARSER ./calc input stderr: ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is 
token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' input: -syntax error: invalid character: '#' -stderr: - | 1 + 2 * 3 + !* ++ -stderr: -./calc.at:1387: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -1.3: syntax error, unexpected number -./calc.at:1400: $PREPARSER ./calc input -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1392: cat stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88711,39 +83720,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: cat stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: cat stderr +./calc.at:1387: cat stderr + | 1 2 +./calc.at:1394: $PREPARSER ./calc input + | (- *) + (1 2) = 1 stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: $PREPARSER ./calc input stderr: -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -memory exhausted -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1398: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +1.3: syntax error +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1391: cat stderr +./calc.at:1390: cat stderr input: -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88753,128 +83749,242 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error, unexpected number +./calc.at:1392: cat stderr +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: cat stderr +input: +stderr: +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +syntax error + | (!!) + (1 2) = 1 +493. calc.at:1367: ./calc.at:1370: $PREPARSER ./calc input +stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: -./calc.at:1389: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1353: $PREPARSER ./calc input | 1 2 -./calc.at:1397: cat stderr -./calc.at:1400: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1391: cat stderr -./calc.at:1392: $PREPARSER ./calc input -memory exhausted + ok +syntax error +./calc.at:1395: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) 
+Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 + | 1//2 +./calc.at:1390: $PREPARSER ./calc input stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; +stderr: +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +1.3: syntax error + +stderr: +stdout: +./calc.at:1398: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +516. calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... +./calc.at:1408: mv calc.y.tmp calc.y + +./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +input: +input: +./calc.at:1374: cat stderr +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1394: cat stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: -input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1363: cat stderr input: -./calc.at:1398: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -input: - | (1 + #) = 1111 -input: -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1395: cat stderr -stderr: -./calc.at:1393: cat stderr -./calc.at:1389: $PREPARSER ./calc input -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) - | 1 + 2 * 3 + !* ++ -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: $PREPARSER ./calc input -stderr: -./calc.at:1403: cat stderr - | (1 + #) = 1111 -syntax error: invalid character: '#' -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -memory exhausted -1.2: syntax error: invalid character: '#' -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error, unexpected '=' -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +./calc.at:1364: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1//2 + | 1//2 stderr: -stderr: -syntax error: invalid character: '#' -input: -input: -1.6: syntax error: invalid character: '#' -input: -./calc.at:1400: cat stderr -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -stderr: -stderr: -stdout: - | (# + 1) = 1111 | 1//2 -./calc.at:1393: $PREPARSER ./calc input +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: $PREPARSER ./calc input stderr: -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1403: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1402: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -memory exhausted -input: stderr: stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88884,23 +83994,159 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -stderr: +syntax error, unexpected number +1.3: syntax error +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.6: syntax error: invalid character: '#' ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1398: cat stderr -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1387: cat stderr -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): 
+ $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: stderr: +syntax error | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -88914,7 +84160,14 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1389: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1398: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +input: +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88924,19 +84177,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1402: $PREPARSER ./calc input -stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +input: +./calc.at:1375: cat stderr +input: +./calc.at:1408: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +input: +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: cat stderr + | (1 + # + 1) = 1111 + | error + | (1 + 1) / (1 - 1) + | (1 + # + 1) = 1111 stderr: -stdout: +./calc.at:1374: $PREPARSER ./calc input ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88947,147 +84201,992 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $PREPARSER ./test -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-syntax error: invalid character: '#' -input: stderr: +stderr: +./calc.at:1364: $PREPARSER ./calc input +stderr: +======== Testing with C++ standard flags: '' +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token 
+Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1363: $PREPARSER ./calc input +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1362: $PREPARSER ./calc input +1.3: syntax error +syntax error, unexpected number +1.3: syntax error ./calc.at:1389: cat stderr - | (#) + (#) = 2222 -./calc.at:1387: $PREPARSER ./calc input +syntax error +./calc.at:1393: cat stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: stderr: stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stdout: stderr: -./types.at:139: ./check - | - | +1 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' 
(1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) 
+Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1392: cat stderr - | 1 2 -./calc.at:1402: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -input: -./calc.at:1391: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1394: cat stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; +./calc.at:1369: cat stderr +./calc.at:1360: cat stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1370: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | error +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +489. 
calc.at:1360: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1375: $PREPARSER ./calc input +syntax error ./calc.at:1397: cat stderr -2.1: syntax error, unexpected '+' -./calc.at:1393: "$PERL" -pi -e 'use strict; +stderr: +1.3: syntax error + +517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose ... 
+./calc.at:1409: mv calc.y.tmp calc.y + +./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: +input: +input: +input: +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1400: cat stderr input: -./calc.at:1395: cat stderr + | 1//2 + | 1//2 +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1389: $PREPARSER ./calc input input: - | (#) + (#) = 2222 -1.3: syntax error, unexpected number - | (1 + #) = 1111 -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1394: $PREPARSER ./calc input -stderr: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' + | (- *) + (1 2) = 1 + | 1 2 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1398: $PREPARSER ./calc input stderr: -input: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' - | (1 + # + 1) = 1111 -./calc.at:1397: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: + | + | +1 stderr: -2.1: syntax error, unexpected '+' -syntax error: invalid character: '#' -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1371: $PREPARSER ./calc input + | (* *) + (*) + (*) +1.1: syntax error, unexpected invalid token +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) 
+Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token 
number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: -stderr: -stderr: -1.3: syntax error, unexpected number - | (# + 1) = 1111 -stderr: -./calc.at:1403: cat stderr -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1394: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89097,21 +85196,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1390: cat stderr stderr: -./calc.at:1393: cat stderr - | 1 + 2 * 3 + !+ ++ -syntax error: invalid character: '#' -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1395: $PREPARSER ./calc input stderr: - | error -1.6: syntax error: invalid character: '#' -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1402: "$PERL" -pi -e 'use strict; +stderr: + | 1//2 +./calc.at:1397: $PREPARSER ./calc input +stderr: +syntax error +syntax error +1.3: syntax error, unexpected number +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1: syntax error, unexpected invalid token +stderr: +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' 
stderr +./calc.at:1395: cat stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89121,9 +85231,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1409: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89133,26 +85242,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -stderr: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1393: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./calc.at:1387: cat stderr -1.6: syntax error: invalid character: '#' -stderr: - | error -./calc.at:1403: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89162,11 +85255,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1402: cat stderr -syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error stderr: -./calc.at:1394: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1400: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89176,8 +85280,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1398: cat stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; +input: +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89187,16 +85291,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error: invalid character: '#' -input: -1.1: syntax error, unexpected invalid token -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + #) = 1111 -./calc.at:1398: $PREPARSER ./calc /dev/null - | 1 + 2 * 3 + !- ++ -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89206,10 +85302,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1400: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +input: +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89220,37 +85314,44 @@ }eg ' expout || exit 77 stderr: + | 1//2 +./calc.at:1394: $PREPARSER ./calc input + | error +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1390: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr -stderr: -./calc.at:1394: cat stderr -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -syntax error: invalid character: '#' -1.1: syntax error, unexpected end of input -./calc.at:1397: cat stderr -1.1: syntax error, unexpected invalid token -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -input: -./calc.at:1392: cat stderr - | (1 + # + 1) = 1111 +syntax error +syntax error +1.3: syntax error, unexpected number stderr: stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) input: - | 1//2 -./calc.at:1402: $PREPARSER ./calc input - | 1 = 2 = 3 - | (1 + #) = 1111 -./calc.at:1394: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1400: $PREPARSER ./calc input -./calc.at:1389: $PREPARSER ./calc input -1.1: syntax error, unexpected end of input -./calc.at:1393: "$PERL" -pi -e 'use strict; +input: +./calc.at:1353: cat stderr +./calc.at:1363: cat stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89260,31 +85361,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: cat stderr -input: +./calc.at:1362: cat 
stderr +./calc.at:1364: cat stderr stderr: + | 1//2 stderr: +./calc.at:1395: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) - | (1 + 1) / (1 - 1) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1397: $PREPARSER ./calc input -syntax error: invalid character: '#' +syntax error +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +1.3: syntax error stderr: stdout: -./calc.at:1395: "$PERL" -pi -e 'use strict; +./types.at:139: ./check +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: cat stderr +./calc.at:1392: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stderr: +stderr: +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: "$PERL" -ne ' +syntax error +./calc.at:1391: cat stderr +stderr: +stderr: +stdout: +stdout: +./calc.at:1403: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -89295,45 +85412,55 @@ || /\t/ )' calc.c calc.h -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +484. calc.at:1353: ./calc.at:1407: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + + ok +490. calc.at:1362: ok +stderr: + +stdout: +./types.at:139: $PREPARSER ./test + input: input: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 - | (1 + # + 1) = 1111 -stderr: -stderr: +input: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: cat stderr +./calc.at:1371: cat stderr +./calc.at:1370: cat stderr +./calc.at:1369: cat stderr +./calc.at:1375: cat stderr + | 1 2 + | (1 + 1) / (1 - 1) + | (1 + 1) / (1 - 1) +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc /dev/null stderr: -./calc.at:1391: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -1.11-17: error: null divisor -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1387: "$PERL" -pi -e 'use strict; +518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... +./calc.at:1411: mv calc.y.tmp calc.y + +1.1: syntax error +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +input: +input: +./calc.at:1413: mv calc.y.tmp calc.y + +input: +input: +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89343,13 +85470,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: cat stderr -stderr: -./calc.at:1395: cat stderr -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) input: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -89363,884 +85484,330 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1408: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -stderr: -stderr: -./calc.at:1393: cat stderr -1.11-17: error: null divisor -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: cat stderr -stderr: -stdout: -stderr: -1.6: syntax error: invalid character: '#' -./types.at:139: ./check -syntax error: invalid character: '#' -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1407: $PREPARSER ./calc input my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +stderr: +stderr: +======== Testing with C++ standard flags: '' +./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering 
state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | error +./calc.at:1387: $PREPARSER ./calc input + | error + | error +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' 
(3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = 
token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.8: ) - $2 = 
nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering 
state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 
-Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is 
token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -input: -input: -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc input ./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90251,16 +85818,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1398: cat stderr +stdout: +./calc.at:1402: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +input: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: input: input: +stderr: +stderr: +stderr: | 1 = 2 = 3 -./calc.at:1403: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ - | (1 + 1) / (1 - 1) -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1387: cat stderr -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1394: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !+ ++ +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +1.1: syntax error +syntax error +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90270,8 +85861,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1397: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90281,1236 +85878,301 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1400: cat stderr stderr: -memory exhausted +input: stderr: stderr: -./calc.at:1402: cat stderr stderr: -error: null divisor -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +syntax error Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next 
token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering 
state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 
= nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting 
token '-' (9.3: ) -Entering state 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is 
token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack 
now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 +Stack now 0 8 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp 
(12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' 
(13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stderr: -1.7: syntax error, unexpected '=' -input: -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1394: cat stderr -input: - | 1 2 -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1408: $PREPARSER ./calc input -./calc.at:1389: cat stderr - | (# + 1) = 1111 -./calc.at:1387: $PREPARSER ./calc input -stderr: -./calc.at:1400: $PREPARSER ./calc input -stderr: -stderr: -input: -stderr: - | error -stderr: -stderr: -memory exhausted -1.7: syntax error, unexpected '=' -./calc.at:1402: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -syntax error: invalid character: '#' -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: cat stderr - | (# + 1) = 1111 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -stderr: -error: null divisor -1.1: syntax error, unexpected invalid token -stderr: -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1392: cat stderr -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1391: cat stderr -stderr: -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -508. calc.at:1397: ok -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: -stderr: -stderr: -error: null divisor -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) - | (1 + # + 1) = 1111 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -stderr: -input: -1.1: syntax error, unexpected invalid token -./calc.at:1403: cat stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -error: null divisor -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -stderr: -input: -./types.at:139: $PREPARSER ./test -./calc.at:1391: $PREPARSER ./calc input -syntax error: invalid character: '#' - | - | +1 -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1395: cat stderr -stderr: -./calc.at:1400: cat stderr -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: cat stderr -stderr: -1.11-17: error: null divisor -stderr: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: cat stderr -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1393: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -2.1: syntax error, unexpected '+' -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr -./calc.at:1400: $PREPARSER ./calc /dev/null -======== Testing with C++ standard flags: '' -input: -input: -./calc.at:1394: cat stderr -./calc.at:1402: cat stderr - | 1//2 -stderr: -stderr: -505. calc.at:1393: ./calc.at:1408: $PREPARSER ./calc input - | (#) + (#) = 2222 -stderr: -input: -input: -./calc.at:1395: $PREPARSER ./calc input -2.1: syntax error, unexpected '+' -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.11-17: error: null divisor - ok -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 - | (!!) + (1 2) = 1 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -./calc.at:1398: $PREPARSER ./calc input -input: -./calc.at:1387: $PREPARSER ./calc input -stdout: - | (1 + # + 1) = 1111 -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1389: cat stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1401: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -syntax error: invalid character: '#' -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 - | 1 = 2 = 3 -stderr: -./calc.at:1402: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1400: cat stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -501. 
calc.at:1389: ok -syntax error: invalid character: '#' -stderr: -stderr: +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -91524,634 +86186,11 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1392: cat stderr -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -525. calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ... -stderr: -syntax error: invalid character: '#' -./calc.at:1433: mv calc.y.tmp calc.y - -stderr: - -./calc.at:1403: cat stderr -input: -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error, unexpected '=' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1408: cat stderr -input: -stderr: -./calc.at:1391: cat stderr -stderr: -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1403: $PREPARSER ./calc /dev/null -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1392: $PREPARSER ./calc input - -1.7: syntax error, unexpected '=' -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.1: syntax error, unexpected end of input -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -error: null divisor -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1408: $PREPARSER ./calc input -./calc.at:1398: cat stderr -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1 2 -./calc.at:1395: cat stderr -./calc.at:1401: $PREPARSER ./calc input -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -error: null divisor -stderr: -503. calc.at:1391: 1.1: syntax error, unexpected end of input -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - ok -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1394: cat stderr -./calc.at:1402: cat stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: - | (- *) + (1 2) = 1 - | (1 + #) = 1111 -./calc.at:1398: $PREPARSER ./calc input -stderr: -./calc.at:1395: $PREPARSER ./calc input -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: -input: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -stderr: -input: -526. calc.at:1434: testing Calculator C++ %locations api.location.type={Span} ... 
-./calc.at:1394: $PREPARSER ./calc input -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -syntax error: invalid character: '#' -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1400: cat stderr -stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr - -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1403: cat stderr -stderr: -./calc.at:1434: mv calc.y.tmp calc.y - -error: null divisor - | - | +1 -stderr: ./calc.at:1402: $PREPARSER ./calc input -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -input: -./calc.at:1408: cat stderr -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1401: cat stderr -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -2.1: syntax error, unexpected '+' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: cat stderr -stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -error: null divisor -input: - | 1 = 2 = 3 -input: -./calc.at:1408: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1400: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1403: $PREPARSER ./calc input -stderr: -input: -error: null divisor -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose ... -./calc.at:1435: mv calc.y.tmp calc.y - -stderr: -stderr: -./calc.at:1398: cat stderr -2.1: syntax error, unexpected '+' -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 - | 1//2 -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -504. calc.at:1392: ok -stdout: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: ./check -./calc.at:1433: $CXX $CPPFLAGS $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -error: null divisor -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) 1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1395: cat stderr -./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1401: cat stderr -./calc.at:1394: cat stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -input: - | (# + 1) = 1111 -input: -input: -./calc.at:1400: cat stderr -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.1: syntax error +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) | (* *) + (*) + (*) - | error -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -506. calc.at:1394: ok -./calc.at:1401: $PREPARSER ./calc input - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -528. calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose ... -./calc.at:1437: mv calc.y.tmp calc.y - -./calc.at:1398: $PREPARSER ./calc input -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: cat stderr -./calc.at:1402: cat stderr -stderr: -input: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1403: cat stderr -stderr: -stderr: -./calc.at:1387: cat stderr - | (- *) + (1 2) = 1 - -syntax error: invalid character: '#' -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1402: $PREPARSER ./calc /dev/null -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -input: -stderr: -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 - | - | +1 -./calc.at:1408: $PREPARSER ./calc input -1.1: syntax error, unexpected end of file -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -500. calc.at:1387: ok - | (!!) 
+ (1 2) = 1 -./calc.at:1403: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose ... -./calc.at:1438: mv calc.y.tmp calc.y - -./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - -stderr: -530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose ... -stdout: -531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose ... -./calc.at:1441: mv calc.y.tmp calc.y - -./calc.at:1440: mv calc.y.tmp calc.y - -./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1407: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1435: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1434: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -stderr: -stderr: -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -1.1: syntax error, unexpected end of file -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1407: $PREPARSER ./calc input -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS 
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -stderr: -./calc.at:1401: $PREPARSER ./calc input -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: cat stderr -./calc.at:1398: cat stderr -./calc.at:1440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1438: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -92989,19 +87028,32 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -======== Testing with C++ standard flags: '' +syntax error +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1389: cat stderr +./calc.at:1393: cat stderr +input: +./calc.at:1413: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: + | 1//2 +stderr: +./calc.at:1398: $PREPARSER ./calc input + | 1 2 +./calc.at:1403: $PREPARSER ./calc input +stderr: +stderr: +syntax error stderr: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Reading a token @@ -93838,16 +87890,37 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1395: cat stderr +./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: cat stderr + | error +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1400: cat stderr stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +syntax error +1.3: syntax error, unexpected number +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stdout: -./calc.at:1408: cat stderr stdout: -input: -input: -./calc.at:1405: "$PERL" -ne ' +stderr: +./calc.at:1401: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -93858,9 +87931,7 @@ || /\t/ )' calc.c calc.h -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1408: $PREPARSER ./calc /dev/null -./calc.at:1409: "$PERL" -ne ' +./calc.at:1405: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -93871,48 +87942,89 @@ || /\t/ )' calc.c calc.h +1.7: syntax error, unexpected '=' input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1402: cat stderr +input: +input: +./calc.at:1394: cat stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1411: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stderr: | 1 2 -./calc.at:1398: $PREPARSER ./calc input + | 1 2 + | error + | error + | 1 + 2 * 3 + !- ++ +./calc.at:1402: $PREPARSER ./calc input ./calc.at:1407: $PREPARSER ./calc input +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1389: $PREPARSER ./calc input +stderr: +./calc.at:1371: cat stderr +1.1: syntax error +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: input: -./calc.at:1403: cat stderr stderr: -stdout: stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) - | (* *) + (*) + (*) -input: stderr: stderr: -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1401: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.1: 2) | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -93926,29 +88038,8 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1411: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1402: $PREPARSER ./calc input ./calc.at:1405: $PREPARSER ./calc input -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -93963,32 +88054,46 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 - | (- *) + (1 2) = 1 -./calc.at:1409: $PREPARSER ./calc input +./calc.at:1401: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.1: 2) +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1363: cat stderr stderr: -input: +./calc.at:1395: cat stderr +1.1: syntax error +./calc.at:1364: cat stderr stderr: stderr: - | - | +1 stderr: -./calc.at:1403: $PREPARSER ./calc input -syntax error: invalid character: '#' -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1401: $PREPARSER ./calc input +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
stderr: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error, unexpected number +stderr: +stdout: +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected number Starting parse Entering state 0 Reading a token @@ -94004,31 +88109,314 @@ syntax error Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.1: 2) -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./types.at:139: $PREPARSER ./test +syntax error +input: +input: +input: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: cat stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1391: cat stderr + | 1//2 + | error +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1400: $PREPARSER ./calc input stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +491. calc.at:1363: ok +1.3: syntax error, unexpected number + | 1 = 2 = 3 +stderr: +./calc.at:1390: $PREPARSER ./calc input +stderr: +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stdout: +stderr: +stderr: +./types.at:139: ./check +stderr: + +syntax error +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +492. calc.at:1364: ok +./calc.at:1369: cat stderr + +520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1414: mv calc.y.tmp calc.y + +./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +521. calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1416: mv calc.y.tmp calc.y + +./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +input: +input: +./calc.at:1392: cat stderr +input: +./calc.at:1387: cat stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 2 + | error +stderr: + | 1 2 +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1395: $PREPARSER ./calc input +stderr: +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1405: $PREPARSER ./calc input +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +1.7: syntax error +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +======== Testing with C++ standard flags: '' +syntax error +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !+ ++ +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1375: cat stderr +./calc.at:1370: $PREPARSER ./calc input +stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +syntax error, unexpected invalid token +======== Testing with C++ standard flags: '' +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1374: cat stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1398: cat stderr input: +./calc.at:1403: cat stderr +./calc.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' + | 1 = 2 = 3 + | 1 = 2 = 3 +stderr: +stderr: +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc input +1.14: memory exhausted +1.7: syntax error +stderr: +stderr: +stderr: +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.1-46: error: 4444 != 1 +1.7: syntax error +syntax error +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1402: cat stderr + | + | +1 +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 stderr: +stderr: +stderr: +stderr: +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +stdout: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +syntax error +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +syntax error, unexpected invalid token +syntax error +./calc.at:1397: cat stderr +./calc.at:1407: cat stderr +stderr: +./types.at:139: $PREPARSER ./test + | error +./calc.at:1398: $PREPARSER ./calc input +stdout: +stderr: +stderr: +./calc.at:1408: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +2.1: syntax error, unexpected '+' +1.7: syntax error +input: +./calc.at:1400: cat stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 + | 1//2 +stderr: +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1374: $PREPARSER ./calc input +stderr: +1.1: syntax error, unexpected invalid token +stderr: +stderr: +1.14: memory exhausted +input: +./calc.at:1371: cat stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: cat stderr +input: +input: + | 1//2 +./calc.at:1402: $PREPARSER ./calc input +input: +stderr: +./calc.at:1401: cat stderr + | 1 = 2 = 3 +./calc.at:1397: $PREPARSER ./calc input +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -94043,6 +88431,90 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +stderr: +./calc.at:1408: $PREPARSER ./calc input +2.1: syntax error, unexpected '+' +syntax error + | 1//2 +./calc.at:1407: $PREPARSER ./calc input +stderr: +./calc.at:1393: cat stderr +./calc.at:1405: cat stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected invalid token +stdout: +syntax error +./types.at:139: ./check +stderr: +syntax error +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stderr: + | error + | 1 + 2 * 3 + !- ++ +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Reading a token +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.1: ) +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: $PREPARSER ./calc input +1.7: syntax error + | (!!) + (1 2) = 1 +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1369: cat stderr +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +stderr: +stderr: +syntax error +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: Starting parse Entering state 0 Reading a token @@ -94342,14 +88814,7 @@ Shifting token ')' (5.4: ) Entering state 27 Reducing stack 0 by rule 13 (line 125): - $1 = token ' | 1 + 2 * 3 + !- ++ -./calc.at:1411: $PREPARSER ./calc input -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: -(' (5.1: ) + $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -94886,40 +89351,22 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1400: cat stderr +stderr: +stderr: +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test +input: +input: +input: +./calc.at:1390: cat stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -94930,12 +89377,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: cat stderr -input: -./calc.at:1402: cat stderr -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr + | 1//2 + | 1 = 2 = 3 + | 1//2 + | 1 = 2 = 3 +stderr: +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1389: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1401: $PREPARSER ./calc input stderr: +1.7: syntax error Starting parse Entering state 0 Reading a token @@ -95235,10 +89689,7 @@ Shifting token ')' (5.4: ) Entering state 27 Reducing stack 0 by rule 13 (line 125): - $1 = token ' | 1 2 -./calc.at:1405: $PREPARSER ./calc input -stderr: -(' (5.1: ) + $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -95775,12 +90226,11 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -input: stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1394: cat stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -95790,19 +90240,284 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: $PREPARSER ./calc input input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | + | +1 +stderr: +./calc.at:1390: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Reading a token +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.1: ) + | (#) + (#) = 2222 +./calc.at:1369: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stdout: +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or 
'(' or '!' +stderr: +stdout: +stdout: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +syntax error +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr input: +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1402: $PREPARSER ./calc input +./calc.at:1395: cat stderr | 1 2 -./calc.at:1401: $PREPARSER ./calc /dev/null - | 1 + 2 * 3 + !+ ++ +stderr: +stderr: +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1408: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +stderr: +stderr: +stderr: +stderr: +stderr: +======== Testing with C++ standard flags: '' +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +2.1: syntax error +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1375: cat stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +syntax error +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: +stderr: +stderr: +stdout: +stdout: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1409: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./types.at:139: $PREPARSER ./test +./calc.at:1370: cat stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1398: cat stderr +./calc.at:1403: cat stderr +./calc.at:1371: cat stderr + | 1 = 2 = 3 + | + | +1 +./calc.at:1395: $PREPARSER ./calc input +stderr: +======== Testing with C++ standard flags: '' + | 1 = 2 = 3 +stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +input: +stderr: +input: +======== Testing with C++ standard flags: '' +./calc.at:1375: $PREPARSER ./calc /dev/null +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1391: $PREPARSER ./calc input +stderr: +2.1: syntax error +2.1: syntax error + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1409: $PREPARSER ./calc input -./calc.at:1400: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc input +syntax error + | 1 + 2 * 3 + !* ++ +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1397: cat stderr stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; +stdout: +input: +./calc.at:1400: cat stderr +./types.at:139: ./check +input: +./calc.at:1401: cat stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -95812,7 +90527,51 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1374: cat stderr +./calc.at:1392: cat stderr +./calc.at:1387: cat stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: +./calc.at:1402: cat stderr +stderr: +stderr: + | (- *) + (1 2) = 1 + | 1 = 2 = 3 + | error +syntax error +./calc.at:1405: cat stderr +./calc.at:1403: $PREPARSER ./calc input +stdout: +1.1: syntax error, unexpected end of file +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
+./calc.at:1413: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +======== Testing with C++ standard flags: '' +stderr: stderr: +2.1: syntax error +stderr: +1.14: memory exhausted Starting parse Entering state 0 Reading a token @@ -95849,14 +90608,14 @@ Entering state 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -95931,20 +90690,20 @@ Entering state 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) Entering state 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -95967,7 +90726,7 @@ Entering state 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -96031,14 +90790,14 @@ Entering state 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -96060,7 +90819,7 @@ Entering state 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -96103,7 +90862,7 @@ Entering state 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -96111,7 +90870,7 @@ Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -96131,7 +90890,7 @@ Entering state 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -96204,19 +90963,19 @@ Entering state 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) 
Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -96238,7 +90997,7 @@ Entering state 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -96298,7 +91057,7 @@ Entering state 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -96317,7 +91076,7 @@ Entering state 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -96340,7 +91099,7 @@ Entering state 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -96403,7 +91162,7 @@ Entering state 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -96412,7 +91171,7 @@ Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -96420,7 +91179,7 @@ Entering state 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -96505,14 +91264,14 @@ Entering state 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) Entering state 33 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -96576,7 +91335,7 @@ Entering state 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -96585,7 +91344,7 @@ Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -96605,7 +91364,7 @@ Entering state 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -96649,267 +91408,84 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input 
(1.1-14.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1374: $PREPARSER ./calc /dev/null +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error, unexpected end of file +syntax error, unexpected '=' +stdout: +./types.at:139: $PREPARSER ./test +input: +input: +input: +input: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: + | error + | + | +1 + | error +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +stderr: + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 + | + | +1 + | + | +1 +./calc.at:1405: $PREPARSER ./calc input +stderr: +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1397: $PREPARSER ./calc input + | error stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +1.1: syntax error, unexpected invalid token +./calc.at:1401: $PREPARSER ./calc input +syntax error stderr: +1.7: syntax error, unexpected '=' Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) @@ -96936,14 +91512,14 @@ Entering state 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -97018,20 +91594,20 @@ Entering state 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) Entering state 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (2.1: 1) $2 = token 
'+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -97054,7 +91630,7 @@ Entering state 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -97118,14 +91694,14 @@ Entering state 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -97147,7 +91723,7 @@ Entering state 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -97190,7 +91766,7 @@ Entering state 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -97198,7 +91774,7 @@ Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -97218,7 +91794,7 @@ Entering state 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -97291,19 +91867,19 @@ Entering state 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -97325,7 +91901,7 @@ Entering state 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -97385,7 +91961,7 @@ Entering state 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -97404,7 +91980,7 @@ Entering state 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -97427,7 +92003,7 @@ Entering state 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -97490,7 +92066,7 @@ Entering state 29 Reading a token Next token is token ')' (10.11: 
) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -97499,7 +92075,7 @@ Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -97507,7 +92083,7 @@ Entering state 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -97592,14 +92168,14 @@ Entering state 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) Entering state 33 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -97663,7 +92239,7 @@ Entering state 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -97672,7 +92248,7 @@ Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -97692,7 +92268,7 @@ Entering state 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -97736,51 +92312,140 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: cat stderr +syntax error input: -./calc.at:1403: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1413: $PREPARSER ./calc input stderr: +stdout: +./types.at:139: ./check stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1411: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. 
== 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1369: cat stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1407: cat stderr +1.1: syntax error, unexpected invalid token stderr: - | 1 2 -./calc.at:1395: cat stderr stderr: -./calc.at:1411: $PREPARSER ./calc input -stdout: stderr: -./calc.at:1398: cat stderr stderr: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +2.1: syntax error +stderr: +syntax error +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +syntax error, unexpected '=' +stderr: +syntax error +syntax error +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example . c A A $end + First reduce derivation $accept -> [ a -> [ b -> [ . ] d -> [ c A A ] ] $end ] + Second example . c A A $end + Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 7.000000 + First example b . c A A $end + First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] + Second example b . A $end + Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example c . c A A $end + First reduce derivation $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] + Second example c . A $end + Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example b c . A + Shift derivation a -> [ b d -> [ c . A ] ] + Second example b c . c A A $end + Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example b c . c A A $end + First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] + Second example b c . A $end + Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + First example b c . 
A + Shift derivation a -> [ b d -> [ c . A ] ] + Second example b c . A $end + Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example b d . + First reduce derivation a -> [ b d . ] + Second reduce derivation a -> [ b d -> [ d . ] ] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example c d . + First reduce derivation a -> [ c d . ] + Second reduce derivation a -> [ c d -> [ d . ] ] +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y input: -./types.at:139: ./check +1.14: memory exhausted + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1411: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' stderr: - | 1//2 -./calc.at:1407: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -97792,245 +92457,69 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): 
- $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -98038,1414 +92527,764 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -input: -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + 1) / (1 - 1) -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | (* *) + (*) + (*) -./calc.at:1403: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1405: cat stderr -stderr: -stderr: -error: null divisor -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.1: ) -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -Starting parse -Entering state 0 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) - | 1 + 2 * 3 + !* ++ -Starting parse -Entering state 0 + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.1: ) -input: -stderr: - | 1 + 2 * 3 + !- ++ -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1401: cat stderr -./calc.at:1402: cat stderr -./calc.at:1409: cat stderr -./calc.at:1398: $PREPARSER ./calc input -stderr: -input: -error: null divisor -./calc.at:1400: $PREPARSER ./calc input - | 1//2 -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1408: cat stderr -stderr: -1.14: memory exhausted -stderr: -input: -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1//2 -./calc.at:1409: $PREPARSER ./calc input -input: -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) - | (- *) + (1 2) = 1 -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1408: $PREPARSER ./calc input -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stderr: -stderr: -1.14: memory exhausted -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1411: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 
= token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: cat stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: cat stderr -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: cat stderr -stderr: -./calc.at:1395: cat stderr -./calc.at:1407: cat stderr -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (5.11-6.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token 
'\n' (1.17-2.0: ) +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: - | 1//2 -input: -./calc.at:1411: $PREPARSER ./calc input -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -input: -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | error -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1405: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -stderr: -input: -Starting parse -Entering state 0 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -507. calc.at:1395: ./calc.at:1400: $PREPARSER ./calc input - ok - | 1 + 2 * 3 + !+ ++ -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1407: $PREPARSER ./calc input -stderr: -./calc.at:1403: $PREPARSER ./calc input -stderr: -stderr: -stderr: -./calc.at:1398: cat stderr -Starting parse -Entering state 0 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -1.14: memory exhausted -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -stderr: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr -stderr: -stderr: -1.14: memory exhausted -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1411: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) - -./calc.at:1402: cat stderr -input: -stderr: -./calc.at:1405: cat stderr -./calc.at:1400: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1403: $PREPARSER ./calc input -stdout: -input: -stderr: - | error -./types.at:139: $PREPARSER ./test -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -input: - | 1 = 2 = 3 -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (#) + (#) = 2222 -./calc.at:1400: $PREPARSER ./calc input -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) - | (- *) + (1 2) = 1 -input: -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1405: cat stderr -./calc.at:1411: cat stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: cat stderr -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -input: -532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1443: mv calc.y.tmp calc.y - -./calc.at:1400: cat stderr -input: -input: - | 1 = 2 = 3 - | 1 + 2 * 3 + !* ++ -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1403: $PREPARSER ./calc input - | - | +1 -./calc.at:1405: $PREPARSER ./calc input -stderr: -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -1.14: memory exhausted -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering 
state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -input: -./calc.at:1401: cat stderr -stderr: -./calc.at:1409: cat stderr -input: - | error -./calc.at:1409: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.14: memory exhausted - | (1 + #) = 1111 -stderr: -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1409: cat stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -======== Testing with C++ standard flags: '' -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1401: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -input: -stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 = 2 = 3 -./calc.at:1409: $PREPARSER ./calc input -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1411: cat stderr -./calc.at:1405: cat stderr -./calc.at:1403: cat stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -1.6: syntax error: invalid character: '#' -stderr: -stderr: -stdout: -Starting parse -Entering state 0 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1405: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1431: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -stderr: -Starting parse -Entering state 0 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -input: -stderr: - | - | +1 -stderr: -stdout: -./calc.at:1400: cat stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -./calc.at:1414: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -stdout: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -./calc.at:1411: $PREPARSER ./calc input -stderr: - | (#) + (#) = 2222 -stderr: -./calc.at:1403: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next 
token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (12.12-13.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1401: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -input: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1400: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 
Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1409: cat stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -stderr: -input: -stderr: -./calc.at:1405: cat stderr -./calc.at:1407: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (* *) + (*) + (*) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1443: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -input: -./calc.at:1401: $PREPARSER ./calc input - | - | +1 -./calc.at:1409: $PREPARSER ./calc input -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1414: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1405: $PREPARSER ./calc input -stderr: -stderr: -stderr: +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) 
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 = 2 = 3 -stderr: -stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -99455,9 +93294,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1409: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.7: syntax error, unexpected '=' +1.1: syntax error, unexpected invalid token +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +stderr: +stderr: +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -99467,15 +93327,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS input: -./calc.at:1400: cat stderr -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -99485,26 +93340,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: "$PERL" -ne ' - chomp; - print "$ARGV:$.: 
{$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -stdout: - | 1 2 stderr: stderr: -./calc.at:1431: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -100342,130 +94182,63 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stderr: -./calc.at:1416: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1408: cat stderr + | (1 + #) = 1111 +./calc.at:1369: $PREPARSER ./calc input +syntax error Starting parse Entering state 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) -Error: popping token '=' (1.1: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1413: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1411: cat stderr +Cleanup: discarding lookahead token "number" (1.3: 2) stderr: -input: stderr: stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error -input: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: cat stderr stderr: stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1408: cat stderr stdout: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1416: $PREPARSER ./calc input +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) stdout: -input: +stderr: +stderr: ./types.at:139: ./check -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: cat stderr +./types.at:139: ./check +2.1: syntax error +1.1: syntax error, unexpected invalid token +stdout: +./types.at:139: ./check +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +syntax error +270. counterexample.at:610: input: + ok +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: cat stderr +./calc.at:1393: cat stderr +./calc.at:1390: cat stderr +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y Starting parse Entering state 0 Reading a token @@ -100765,8 +94538,7 @@ Shifting token ')' (5.4: ) Entering state 27 Reducing stack 0 by rule 13 (line 113): - $1 = token '(' | (1 + # + 1) = 1111 - (5.1: ) + $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -101303,42 +95075,41 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) -Error: popping token '=' (1.1: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.1: ) - | (* *) + (*) + (*) -./calc.at:1400: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1375: cat stderr + | error +1.6: syntax error: invalid character: '#' +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1371: cat stderr +input: +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | 1//2 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1408: $PREPARSER ./calc input -./calc.at:1401: cat stderr -./calc.at:1411: $PREPARSER ./calc /dev/null -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -101348,8 +95119,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1390: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: cat stderr +./calc.at:1389: cat stderr +./calc.at:1391: cat stderr +input: + | 1 2 +./calc.at:1413: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +stdout: +./calc.at:1405: cat stderr Starting parse Entering state 0 Reading a token @@ -101649,7 +95456,17 @@ Shifting token ')' (5.4: ) Entering state 27 Reducing stack 0 by rule 13 (line 113): - $1 = token '('stderr: + $1 = token '('./types.at:139: ./check +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -102187,156 +96004,242 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -input: -input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error - | (!!) + (1 2) = 1 -./calc.at:1405: $PREPARSER ./calc input +./calc.at:1375: $PREPARSER ./calc input stderr: - | 1 2 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) + +Starting parse +Entering state 0 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +stderr: +./calc.at:1391: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 +Next token is token "invalid token" (1.1: ) +syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +1.6: syntax error: invalid character: '#' +stderr: +stdout: +./calc.at:1414: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +522. calc.at:1426: testing Calculator lalr1.cc %header ... +./calc.at:1426: mv calc.y.tmp calc.y + +./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +stdout: +./types.at:139: ./check +input: +input: +./calc.at:1401: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1394: cat stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1403: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | + | +1 + | (* *) + (*) + (*) + | 1 2 +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | (#) + (#) = 2222 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1411: $PREPARSER ./calc input +stderr: + | + | +1 + | + | +1 +1.1: syntax error +stdout: +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: $PREPARSER ./calc input +stderr: +./calc.at:1395: cat stderr +./calc.at:1402: cat stderr +./calc.at:1398: cat stderr +input: +Starting parse +Entering state 0 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +stderr: +./calc.at:1414: $PREPARSER ./calc input +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./types.at:139: $PREPARSER ./test +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 stderr: -1.6: syntax error: invalid character: '#' -input: -input: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: cat stderr +stderr: +1.1: syntax error Starting parse Entering state 0 Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) - | (1 + #) = 1111 - | 1 + 2 * 3 + !+ ++ +Next token is token "invalid token" (1.1: ) +syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +input: +input: +./calc.at:1374: cat stderr +input: +input: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | 1 = 2 = 3 + | 1 = 2 = 3 + | 1 = 2 = 3 +./calc.at:1403: $PREPARSER ./calc input +stderr: ./calc.at:1401: $PREPARSER ./calc input stderr: -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1405: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -102346,9 +96249,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -102365,12 +96267,27 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +1.1: syntax error +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1387: cat stderr stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) + | + | +1 +syntax error +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./calc.at:1392: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -103207,128 +97124,72 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1409: $PREPARSER ./calc /dev/null -1.6: syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.7: syntax error, unexpected '=' Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) stderr: +stdout: +./calc.at:1416: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +input: +./calc.at:1397: cat stderr ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: cat stderr +input: + | + | +1 +./calc.at:1398: $PREPARSER ./calc input +stderr: +stderr: + | 1 = 2 = 3 +stderr: +stderr: +stderr: +stderr: +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc /dev/null +./calc.at:1387: $PREPARSER ./calc /dev/null +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) stderr: +syntax error +syntax error +syntax error +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 stderr: Starting parse Entering state 0 @@ -103629,9 +97490,7 @@ Shifting token ')' (5.4: ) Entering state 27 Reducing stack 0 by rule 13 (line 113): - $1 = token '('./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - (5.1: ) + $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -104168,23 +98027,11 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -======== Testing with C++ standard flags: '' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) +stdout: +./types.at:139: ./check input: -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104194,8 +98041,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; +input: +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1409: cat stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104206,35 +98066,55 @@ }eg ' expout || exit 77 stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (# + 1) = 1111 stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) + | + | +1 stderr: -1.6: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' - | 1 2 -./calc.at:1413: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1374: $PREPARSER ./calc input +syntax error +stderr: +2.1: syntax error, unexpected '+' +1.7: syntax error, unexpected '=' +./calc.at:1397: $PREPARSER ./calc /dev/null +1.7: syntax error, unexpected '=' input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +syntax error stderr: - | 1 + 2 * 3 + !+ ++ - | 1 + 2 * 3 + !- ++ ./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -104245,6 +98125,45 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: +input: +syntax error, unexpected '+' +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | 1 2 +stderr: +stderr: +./calc.at:1414: $PREPARSER ./calc input +stderr: +1.1: syntax error +stderr: +1.2: syntax error: invalid character: '#' +syntax error +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: Starting parse Entering state 0 Reading a token @@ -105081,85 +99000,19 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stderr: -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1431: cat stderr -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1371: cat stderr + | 1//2 +./calc.at:1409: $PREPARSER ./calc input stderr: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: cat stderr -./calc.at:1405: cat stderr -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: cat stderr - | 1 2 -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1414: cat stderr -./calc.at:1408: cat stderr -./calc.at:1411: cat stderr -input: stderr: stderr: -./calc.at:1403: cat stderr - | 1//2 -./calc.at:1431: $PREPARSER ./calc input stderr: -input: stderr: +1.7: syntax error, unexpected '=' Starting parse Entering state 0 Reading a token @@ -105175,89 +99028,17 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) -input: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 syntax error -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: $PREPARSER ./calc input -input: -input: -input: - | - | +1 -input: -./calc.at:1407: $PREPARSER ./calc input -stderr: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1414: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Next token is token '+' (1.1: ) +2.1: syntax error, unexpected '+' syntax error -Error: popping nterm input (1.1: ) -Cleanup: discarding lookahead token '+' (1.1: ) -stderr: - | (- *) + (1 2) = 1 -./calc.at:1405: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1409: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1401: cat stderr -./calc.at:1408: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1400: $PREPARSER ./calc input -stderr: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (# + 1) = 1111 -./calc.at:1403: $PREPARSER ./calc input syntax error -./calc.at:1413: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -105267,7 +99048,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1411: cat stderr +./calc.at:1390: cat stderr +./calc.at:1391: cat stderr +./calc.at:1408: cat stderr +input: stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1413: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -105288,52 +99077,10 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -stderr: -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) 
./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stdout: stderr: stderr: -./types.at:139: ./check -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Cleanup: discarding lookahead token '+' (1.1: ) -1.11-17: error: null divisor +stderr: Starting parse Entering state 0 Reading a token @@ -105369,919 +99116,821 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 31 -Reading a token -Next token is token '*' (1.39: ) +Next token is token '=' (2.12: ) Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 30 -Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error: invalid character: '#' -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: -stderr: -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = 
token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token '\n' (8.1-9.0: ) +Shifting 
token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' 
(9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -stderr: - | 1 + 2 * 3 + !* ++ -stderr: -stderr: -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -1.11-17: error: null divisor -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by 
rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 Reading a token -Next token is token "number" (1.37: 
2) -Shifting token "number" (1.37: 2) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 Reducing 
stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: cat stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -input: - | 1 + 2 * 3 + !- ++ -stderr: -./calc.at:1408: $PREPARSER ./calc input -stderr: +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +1.1: syntax error +syntax error, unexpected '+' +./calc.at:1400: $PREPARSER ./calc /dev/null +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1370: cat stderr ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -106292,7 +99941,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -106302,259 +99953,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 +input: +./calc.at:1375: cat stderr +input: +./calc.at:1407: cat stderr +stderr: + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1371: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1390: $PREPARSER ./calc input +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: $PREPARSER ./calc input +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) stderr: -1.14: memory exhausted -./calc.at:1402: cat stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -106566,62 +99998,74 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1401: cat stderr +input: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 2 +stderr: + | error +./calc.at:1416: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1391: $PREPARSER ./calc input +stderr: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -106632,8 +100076,40 @@ }eg ' expout || exit 77 input: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; +input: +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -106643,22 +100119,56 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1403: cat stderr + | (1 + #) = 1111 + | + | +1 | 1//2 -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1370: $PREPARSER ./calc input + | 1 = 2 = 3 +stderr: +stderr: +./calc.at:1411: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1405: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1400: cat stderr -./calc.at:1414: cat stderr +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1375: $PREPARSER ./calc input +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +stderr: Starting parse Entering state 0 Reading a token @@ -106679,9 +100189,22 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1369: cat stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: cat stderr +./calc.at:1389: cat stderr input: -stderr: -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1402: cat stderr +./calc.at:1398: cat stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -106692,11 +100215,15 @@ }eg ' expout || 
exit 77 stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1402: $PREPARSER ./calc input -input: stderr: +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | + | +1 +stderr: +./calc.at:1401: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -106708,60 +100235,96 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Error: popping token '=' (1.1: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.1: ) +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +stdout: +./types.at:139: ./check +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr + | 1 + 2 * 3 + !- ++ + | + | +1 +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +./calc.at:1393: $PREPARSER ./calc /dev/null +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +./calc.at:1389: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) Starting parse Entering state 0 Reading a token @@ -106773,45 +100336,84 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -1.14: memory exhausted -stderr: - | (1 + # + 1) = 1111 -510. 
calc.at:1400: ok -./calc.at:1403: $PREPARSER ./calc input -1.14: memory exhausted -stderr: -input: +Cleanup: discarding lookahead token "number" (1.3: 2) stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1398: $PREPARSER ./calc /dev/null input: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' - | (* *) + (*) + (*) - | error -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1397: cat stderr +./calc.at:1394: cat stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +input: ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: cat stderr -./calc.at:1409: cat stderr -./calc.at:1431: cat stderr +./calc.at:1400: cat stderr +./calc.at:1374: cat stderr +./calc.at:1392: cat stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: cat stderr +./calc.at:1387: cat stderr +stderr: + | (1 + # + 1) = 1111 +stderr: + | + | +1 +stderr: +stderr: +./calc.at:1369: $PREPARSER ./calc input +stderr: +stderr: +stderr: +./calc.at:1402: $PREPARSER ./calc input +stderr: +stderr: +syntax error +stderr: +syntax error +2.1: syntax error, unexpected '+' +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.1: syntax error, unexpected end of input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Error: popping token '=' (1.1: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.1: ) Starting parse Entering state 0 Reading a token @@ -106832,19 +100434,72 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: cat stderr stderr: stderr: +1.6: syntax error: invalid character: '#' + | 1//2 +./calc.at:1394: $PREPARSER ./calc /dev/null stderr: +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1395: $PREPARSER ./calc /dev/null stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -1.14: memory exhausted -./calc.at:1407: cat stderr -./calc.at:1408: cat stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +2.1: syntax error, unexpected '+' +1.1: syntax error, unexpected end of input +syntax error Starting parse Entering state 0 Reading a token @@ -106865,139 +100520,42 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) +stderr: +syntax error +stderr: +2.1: syntax error, unexpected '+' +syntax error +./calc.at:1371: cat stderr +input: input: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1413: cat stderr input: +input: +input: +./calc.at:1408: cat stderr | (!!) 
+ (1 2) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1374: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1397: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1400: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: + | error +./calc.at:1387: $PREPARSER ./calc input ./calc.at:1409: $PREPARSER ./calc input -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) 1.6: syntax error: invalid character: '#' -input: -stderr: +syntax error, unexpected end of input ./calc.at:1401: cat stderr - | error -./calc.at:1431: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 
-Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107007,13 +100565,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1405: cat stderr -./calc.at:1407: $PREPARSER ./calc /dev/null -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1370: cat stderr +2.1: syntax error, unexpected '+' +input: +./calc.at:1390: cat stderr +./calc.at:1391: cat stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107023,255 +100582,100 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !* ++ +./calc.at:1416: cat stderr +stderr: +stderr: +./calc.at:1371: $PREPARSER ./calc input +stderr: +stderr: stderr: -input: stderr: Starting parse Entering state 0 Reading a token -Now at end of input. +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) +error: 2222 != 1 stderr: -stdout: - -./types.at:139: ./check +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected end of input +stderr: +stderr: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error - | 1 + 2 * 3 + !* ++ stderr: -input: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: $PREPARSER ./calc input -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): 
- $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +syntax error +error: 2222 != 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' 
(1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1416: cat stderr - | (#) + (#) = 2222 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1398: $PREPARSER ./calc input +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +input: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -107282,10 +100686,75 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: cat stderr input: +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1411: cat stderr + | 1//2 +./calc.at:1416: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1375: cat stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.14: memory exhausted +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (!!) 
+ (1 2) = 1 + | error + | 1 = 2 = 3 + | (# + 1) = 1111 +./calc.at:1401: $PREPARSER ./calc /dev/null +./calc.at:1391: $PREPARSER ./calc input +stderr: +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1413: $PREPARSER ./calc input +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 stderr: +./calc.at:1405: $PREPARSER ./calc /dev/null +./calc.at:1369: cat stderr input: +./calc.at:1398: cat stderr Starting parse Entering state 0 Reading a token @@ -107297,66 +100766,72 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +stderr: +stderr: +stderr: +./calc.at:1400: cat stderr + | error +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1370: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +1.11: syntax error +1.1-16: error: 2222 != 1 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 + 2 * 3 + !+ ++ + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107366,6 +100841,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1414: cat stderr +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: stderr: Starting parse Entering state 0 @@ -107373,22 +100856,21 @@ Next token is token "invalid token" (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token "invalid token" (1.1: ) -syntax error -stderr: -stderr: - | (#) + (#) = 2222 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -./calc.at:1403: cat stderr -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1403: cat stderr +input: +input: +input: ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -107399,13 +100881,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: - | error -./calc.at:1402: cat stderr input: -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1401: $PREPARSER ./calc input +./calc.at:1407: cat stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: cat stderr +./calc.at:1371: cat stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -107417,173 +100908,91 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) + | (- *) + (1 2) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + 1) / (1 - 1) + | (!!) + (1 2) = 1 +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (!!) + (1 2) = 1 +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1398: $PREPARSER ./calc input +stderr: +./calc.at:1369: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: $PREPARSER ./calc /dev/null +stderr: +stderr: +1.11: syntax error 1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1402: $PREPARSER ./calc /dev/null +stderr: +1.2: syntax error: invalid character: '#' +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107593,22 +101002,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +./calc.at:1409: cat stderr +stderr: stderr: | error -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1409: cat stderr stderr: -input: stderr: +./calc.at:1414: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (1 + 1) / (1 - 1) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +1.11-17: error: null divisor +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +1.1: syntax error, unexpected end of input +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 Starting parse Entering state 0 Reading a token @@ -107621,8 +101037,12 @@ Next token is token "invalid token" (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1414: cat stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +input: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107631,8 +101051,7 @@ ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg -' expout || exit 77 -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107642,9 +101061,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +' expout || exit 77 input: -stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107654,19 +101073,45 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: cat stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11-17: error: null divisor -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: $PREPARSER ./calc input +stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +1.1: syntax error, unexpected end of file +./calc.at:1393: cat stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (#) + (#) = 2222 +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1370: cat stderr +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1389: cat stderr stderr: -./calc.at:1407: "$PERL" -pi -e 'use strict; +stderr: +stderr: +1.1: syntax error, unexpected end of input +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +input: +./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107676,17 +101121,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1402: $PREPARSER ./calc input +input: +./calc.at:1416: cat stderr +./calc.at:1401: cat stderr +./calc.at:1394: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: $PREPARSER ./calc input stderr: stderr: -./calc.at:1408: cat stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; +======== Testing with C++ standard flags: '' +./calc.at:1405: cat stderr + | 1 = 2 = 3 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107697,32 +101166,53 @@ }eg ' expout || exit 77 stderr: -input: stderr: +stderr: +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1389: $PREPARSER ./calc input +1.11-17: error: null divisor +1.11: syntax error +1.1-16: error: 2222 != 1 Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Cleanup: discarding lookahead token '+' (1.1: ) +1.1: syntax error, unexpected end of file +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1392: cat stderr +./calc.at:1387: cat stderr +./calc.at:1374: cat stderr +./calc.at:1397: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1411: cat stderr - | 1 = 2 = 3 -./calc.at:1414: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1401: cat stderr -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1398: cat stderr -./calc.at:1431: cat stderr +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 stderr: -533. calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1445: mv calc.y.tmp calc.y - Starting parse Entering state 0 Reading a token @@ -107752,137 +101242,49 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 input: -1.11-17: error: null divisor -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +input: +input: +input: +./calc.at:1400: cat stderr ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1408: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + # + 1) = 1111 + | error + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1416: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing 
stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1393: $PREPARSER ./calc input +input: +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107892,142 +101294,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1407: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1411: $PREPARSER ./calc input +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -input: -./calc.at:1398: $PREPARSER ./calc input - | (1 + #) = 1111 -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (#) + (#) = 2222 -stderr: -./calc.at:1413: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 = 2 = 3 -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -108038,115 +101316,229 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: $PREPARSER ./calc input -stderr: -./calc.at:1431: $PREPARSER ./calc input +input: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1369: cat stderr +./calc.at:1391: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1413: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1405: $PREPARSER ./calc input ./calc.at:1401: $PREPARSER ./calc input -./calc.at:1416: cat stderr +stderr: +stderr: +stderr: +stderr: +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +1.6: syntax error: invalid character: '#' +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Starting parse +Entering state 0 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Cleanup: discarding lookahead token '+' (1.1: ) +input: +input: +input: +input: +input: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | (!!) + (1 2) = 1 + | 1 = 2 = 3 +./calc.at:1387: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1397: $PREPARSER ./calc input +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 + | (- *) + (1 2) = 1 +./calc.at:1374: $PREPARSER ./calc input + | (- *) + (1 2) = 1 + | (!!) 
+ (1 2) = 1 +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 + | + | +1 +./calc.at:1400: $PREPARSER ./calc input +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1398: cat stderr +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +./calc.at:1403: cat stderr +syntax error +error: 2222 != 1 +Starting parse +Entering state 0 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +Starting parse +Entering state 0 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token "number" (1.1: 1) +Shifting 
token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +syntax error +syntax error +error: 2222 != 1 +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +1.11: syntax error +1.1-16: error: 2222 != 1 +495. calc.at:1369: ok + +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +input: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: cat stderr +./calc.at:1402: cat stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: cat stderr +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -108154,13 +101546,11 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1405: cat stderr -./calc.at:1402: "$PERL" -pi -e 'use strict; +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -108170,215 +101560,134 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1371: cat stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 + | (- *) + (1 2) = 1 +./calc.at:1391: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1413: $PREPARSER ./calc input stderr: stderr: stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +syntax error +error: 2222 != 1 +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = token 
"number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +syntax error +error: 2222 != 1 +stderr: +1.4: syntax error +1.12: syntax error 1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1402: cat stderr +stderr: +syntax error +syntax error +error: 2222 != 1 +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 stderr: stderr: -./calc.at:1403: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: 
discarding lookahead token '=' (1.7: ) +1.11: syntax error +1.1-16: error: 2222 != 1 +523. calc.at:1431: testing Calculator C++ ... +./calc.at:1431: mv calc.y.tmp calc.y + +./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -108386,20 +101695,134 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +syntax error +error: 2222 != 1 input: +input: +input: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1390: $PREPARSER ./calc input +input: + | (* *) + (*) + (*) +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + #) = 1111 +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1414: cat stderr +./calc.at:1402: $PREPARSER ./calc input +stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1409: cat stderr +./calc.at:1398: $PREPARSER ./calc input +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +stdout: +./types.at:139: ./check +./calc.at:1401: cat stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1431: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +./calc.at:1370: cat stderr +stderr: +stderr: +stderr: +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.6: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1413: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 Starting parse Entering state 0 Reading a token @@ -108429,10 +101852,42 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -./calc.at:1409: "$PERL" -pi -e 'use strict; +input: +./calc.at:1400: cat stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 + | 1 = 2 = 3 + | (!!) + (1 2) = 1 +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -108442,6 +101897,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1407: cat stderr +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1395: cat stderr +./calc.at:1411: cat stderr +./calc.at:1416: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: stderr: @@ -108474,376 +101936,56 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -input: -syntax error Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 
(line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | 1 = 2 = 3 -./calc.at:1407: $PREPARSER ./calc input -input: -./calc.at:1416: $PREPARSER ./calc input -stderr: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) stderr: stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 
7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 3) -Shifting token error (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Error: popping nterm exp (1.1: 2) -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token 
is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | 1 + 2 * 3 + !* ++ -1.6: syntax error: invalid character: '#' -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: $PREPARSER ./calc input stderr: -./calc.at:1409: cat stderr stderr: -1.14: memory exhausted - | (1 + #) = 1111 +./calc.at:1407: $PREPARSER ./calc /dev/null +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: $PREPARSER ./test +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -108853,328 +101995,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token '+' (1.1: ) -Shifting token '+' 
(1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 3) -Shifting token error (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Error: popping nterm exp (1.1: 2) -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token 
-Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1401: cat stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +./calc.at:1394: cat stderr +./calc.at:1389: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -513. calc.at:1403: ./calc.at:1402: $PREPARSER ./calc input - ok -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -109184,8 +102014,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; +input: +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -109195,12 +102025,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | (!!) + (1 2) = 1 stderr: -1.14: memory exhausted -./calc.at:1411: cat stderr -1.6: syntax error: invalid character: '#' +input: + | (1 + 1) / (1 - 1) + | (* *) + (*) + (*) +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1400: $PREPARSER ./calc input stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; + | (!!) + (1 2) = 1 +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1408: cat stderr +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -109209,121 +102061,42 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: $PREPARSER ./calc input +input: +input: +./calc.at:1397: cat stderr +./calc.at:1393: cat stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + | 1 = 2 = 3 + | + | +1 +./calc.at:1416: $PREPARSER ./calc input stderr: -./calc.at:1405: cat stderr -input: -./calc.at:1431: cat stderr -./calc.at:1408: cat stderr +./calc.at:1411: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' +stderr: +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +syntax error, unexpected number +error: 2222 != 1 +stderr: +stderr: +======== Testing with C++ standard flags: '' +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1408: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -109331,16 +102104,63 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1414: $PREPARSER ./calc input -stderr: +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -109370,25 +102190,27 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -stdout: -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) input: -stdout: -./calc.at:1398: cat stderr +1.11-17: error: null divisor +./calc.at:1387: cat stderr +./calc.at:1391: cat stderr +input: +./calc.at:1413: cat stderr + | (!!) + (1 2) = 1 stderr: -./types.at:139: $PREPARSER ./test - | - | +1 -./calc.at:1431: $PREPARSER ./calc input +stderr: +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1392: cat stderr +stderr: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1374: cat stderr Starting parse Entering state 0 Reading a token @@ -109408,151 +102230,88 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -input: -stderr: -input: -./types.at:139: $PREPARSER ./test Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: +./calc.at:1389: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +syntax error, unexpected number +error: 2222 != 1 +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +stderr: input: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: cat stderr +./calc.at:1403: cat stderr +./calc.at:1375: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (1 + #) = 1111 +./calc.at:1393: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1405: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1408: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1411: $PREPARSER ./calc input stderr: +input: + | + | +1 +./calc.at:1413: $PREPARSER ./calc input + | (- *) + (1 2) = 1 + | (- *) + (1 2) = 1 +syntax error +error: 2222 != 1 +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1397: $PREPARSER ./calc input +syntax error +error: 2222 != 1 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; Starting parse Entering state 0 Reading a token @@ -109572,384 +102331,80 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +stderr: ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1390: cat stderr +./calc.at:1402: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +stderr: stderr: -input: -./calc.at:1407: cat stderr -./calc.at:1413: cat stderr stderr: +syntax error +syntax error +error: 2222 != 1 +syntax error +error: 2222 != 1 +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +./types.at:139: $PREPARSER ./test stderr: +1.11-17: error: null divisor Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +syntax error +error: 2222 != 1 +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error - | (# + 1) = 1111 -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -stderr: -======== Testing with C++ standard flags: '' -./calc.at:1398: $PREPARSER ./calc input -stderr: - -./calc.at:1416: cat stderr -./calc.at:1402: cat stderr -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) input: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: - | - | +1 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./types.at:139: $PREPARSER ./test +input: +input: +input: +input: +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr ./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -109960,17 +102415,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | (* *) + (*) + (*) + | (# + 1) = 1111 + | (- *) + (1 2) = 1 + | (* *) + (*) + (*) +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1398: $PREPARSER ./calc input + | (- *) + (1 2) = 1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1413: $PREPARSER ./calc input stderr: stderr: -input: - | - | +1 -1.2: syntax error: invalid character: '#' + | (!!) + (1 2) = 1 stderr: -input: -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1405: cat stderr +./calc.at:1401: cat stderr Starting parse Entering state 0 Reading a token @@ -109990,9 +102456,47 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) +syntax error +error: 2222 != 1 +input: +input: stderr: - | (# + 1) = 1111 -./calc.at:1431: "$PERL" -pi -e 'use strict; +stderr: +input: +./calc.at:1414: cat stderr +stderr: +stderr: +stderr: +stderr: +stderr: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) + | (!!) + (1 2) = 1 +1.2: syntax error +1.10: syntax error +1.16: syntax error +syntax error +syntax error +syntax error +./calc.at:1390: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: cat stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -110003,20 +102507,33 @@ }eg ' expout || exit 77 1.2: syntax error: invalid character: '#' -./calc.at:1402: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1407: $PREPARSER ./calc input -input: -stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +syntax error +error: 2222 != 1 +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +syntax error +syntax error +error: 2222 != 1 my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +' expout || exit 77 +stderr: +======== Testing with C++ standard flags: '' +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error Starting parse Entering state 0 Reading a token @@ -110036,169 +102553,24 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1405: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1401: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: cat stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: cat stderr -======== Testing with C++ standard flags: '' -input: -stderr: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -110208,6 +102580,54 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1370: cat stderr +input: +stderr: + | + | +1 + | (- *) + (1 2) = 1 + | (- *) + (1 2) = 1 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./calc.at:1395: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +syntax error +syntax error +syntax error +stderr: +./calc.at:1405: $PREPARSER ./calc input +stderr: +stderr: +syntax error +syntax error +error: 2222 != 1 +stderr: +stderr: +stderr: +stderr: +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +stderr: Starting parse Entering state 0 Reading a token @@ -110227,260 +102647,79 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -======== Testing with C++ standard flags: '' -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' 
(1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -input: stderr: - | (1 + # + 1) = 1111 -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1409: $PREPARSER ./calc /dev/null +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +1.2: syntax error +1.10: syntax error +1.16: syntax error 1.2: syntax error: invalid character: '#' - | (1 + #) = 1111 -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1408: "$PERL" -pi -e 'use strict; +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +496. calc.at:1370: ok + +524. calc.at:1432: testing Calculator C++ %locations ... 
+./calc.at:1432: mv calc.y.tmp calc.y + +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1414: $PREPARSER ./calc /dev/null -./calc.at:1401: $PREPARSER ./calc input -stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1407: cat stderr +./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1411: cat stderr +./calc.at:1416: cat stderr +./calc.at:1408: cat stderr stderr: -./calc.at:1431: $PREPARSER ./calc /dev/null -./calc.at:1398: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -1.6: syntax error: invalid character: '#' -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -534. calc.at:1446: testing Calculator C++ %header %locations parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' 
(1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: ./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -110491,13 +102730,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (- *) + (1 2) = 1 +./calc.at:1395: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +stderr: + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !- ++ +./calc.at:1411: $PREPARSER ./calc /dev/null +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1375: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -110517,78 +102765,10 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1408: cat stderr -./calc.at:1446: mv calc.y.tmp calc.y - +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -110599,203 +102779,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: cat stderr -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: $PREPARSER ./calc input -stderr: -./calc.at:1402: cat stderr -1.6: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1413: cat stderr -syntax error -input: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1408: $PREPARSER ./calc input -input: -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1413: $PREPARSER ./calc /dev/null -input: - | (1 + # + 1) = 1111 -./calc.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | (1 + # + 1) = 1111 -./calc.at:1402: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1401: cat stderr -stderr: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) - | 1 + 2 * 3 + !+ ++ -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1405: cat stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1416: cat stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -110805,78 +102795,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -110886,149 +102805,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -stderr: -1.6: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111039,21 +102816,11 @@ }eg ' expout || exit 77 input: -stderr: -./calc.at:1416: $PREPARSER ./calc /dev/null -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1405: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -stderr: -stderr: -./calc.at:1407: cat stderr -./calc.at:1414: cat stderr -stderr: -./calc.at:1408: "$PERL" -pi -e 'use strict; +input: +./calc.at:1413: cat stderr +input: +./calc.at:1371: cat stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111063,8 +102830,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111074,95 +102840,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.6: syntax error: invalid character: '#' -1.11-17: error: null divisor -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: + | + | +1 + | () + (1 + 1 + 1 +) 
+ (* * *) + (1 * 2 * *) = 1 +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1416: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1402: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: ./check +input: +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1397: cat stderr ' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1398: cat stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111172,111 +102896,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1411: $PREPARSER ./calc input +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | (1 + # + 1) = 1111 +./calc.at:1394: cat stderr stderr: -./calc.at:1431: cat stderr -input: -1.2: syntax error: invalid character: '#' -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) stderr: -./calc.at:1414: $PREPARSER ./calc input -input: stderr: - | (- *) + (1 2) = 1 +stderr: +./calc.at:1413: $PREPARSER ./calc /dev/null +./calc.at:1371: $PREPARSER ./calc input stderr: stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token Now at end of input. 
1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1409: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: cat stderr -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1408: cat stderr -1.2: syntax error: invalid character: '#' -stdout: -stderr: -./types.at:139: $PREPARSER ./test Starting parse Entering state 0 Reading a token @@ -111291,7 +102948,7 @@ Next token is token ')' (1.2: ) Shifting token ')' (1.2: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -111327,7 +102984,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -111346,7 +103003,7 @@ Entering state 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -111365,7 +103022,7 @@ Next token is token ')' (1.18: ) Shifting token ')' (1.18: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -111373,7 +103030,7 @@ Entering state 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -111405,7 +103062,7 @@ Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -111413,7 +103070,7 @@ Entering state 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -111448,7 +103105,7 @@ Entering state 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -111472,7 +103129,7 @@ Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -111480,7 +103137,7 @@ Entering state 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -111524,98 +103181,129 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1413: cat stderr Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) 
+ $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 3) +Shifting token error (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1401: cat stderr -Starting parse -Entering state 0 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Reading a token Next token is token '*' (1.1: ) syntax error Shifting token error (1.1: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Shifting token error (1.1: ) Entering state 11 Next token is token '*' (1.1: ) Error: discarding token '*' (1.1: ) Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token Next token is token ')' (1.1: ) Entering state 11 Next token is token ')' (1.1: ) @@ -111626,9 +103314,16 @@ $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 8 +Entering state 30 Reading a token Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token @@ -111644,13 +103339,37 @@ -> $$ = nterm exp (1.1: 1) Entering state 12 Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token '*' (1.1: ) syntax error -Error: popping nterm exp (1.1: 1) +Error: popping token '*' (1.1: ) +Error: popping nterm exp (1.1: 2) Shifting token error (1.1: ) Entering state 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -111666,10 +103385,10 @@ Reading a token Next token is token '=' (1.1: ) Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 3333) $2 = token '+' (1.1: ) $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 4444) Entering state 8 Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) @@ -111685,17 +103404,17 @@ Reading a token Next token is token '\n' (1.1: ) Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 4444) $2 = token '=' (1.1: ) $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) Entering state 8 Next token is token '\n' (1.1: ) Shifting token 
'\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 4444) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -111709,13 +103428,53 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1398: cat stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: cat stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1393: cat stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1405: cat stderr +./calc.at:1389: cat stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +' expout || exit 77 +./calc.at:1432: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1401: cat stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: cat stderr stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +1.6: syntax error: invalid character: '#' +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 +./calc.at:1375: cat stderr input: input: -input: -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111725,12 +103484,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: cat stderr +./calc.at:1390: cat stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1414: cat stderr + | (- *) + (1 2) = 1 + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 +input: + | (* *) + (*) + (*) +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc input stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1405: cat stderr +stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr +input: Starting parse Entering state 0 Reading a token @@ -111745,7 +103517,7 @@ Next token is token ')' (1.2: ) Shifting token ')' (1.2: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -111781,7 +103553,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -111800,7 +103572,7 @@ Entering state 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -111819,7 +103591,7 @@ Next token is token ')' (1.18: ) Shifting token ')' (1.18: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -111827,7 +103599,7 @@ Entering state 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -111859,7 +103631,7 @@ Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -111867,7 +103639,7 @@ Entering state 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -111902,7 +103674,7 @@ Entering state 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -111926,7 +103698,7 @@ Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -111934,7 +103706,7 @@ Entering state 30 Reading a token Next token is token '=' (1.44: ) -Reducing 
stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -111978,230 +103750,129 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) - | (1 + # + 1) = 1111 -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1408: $PREPARSER ./calc input -./calc.at:1431: $PREPARSER ./calc input -input: -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -511. calc.at:1401: ok - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -======== Testing with C++ standard flags: '' -./calc.at:1413: $PREPARSER ./calc input -stderr: -stderr: -stderr: -./calc.at:1398: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1411: cat stderr -./calc.at:1416: cat stderr -stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): $1 = nterm exp 
(1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 3) +Shifting token error (1.1: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 
8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Reading a token Next token is token '*' (1.1: ) syntax error Shifting token error (1.1: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Shifting token error (1.1: ) Entering state 11 Next token is token '*' (1.1: ) Error: discarding token '*' (1.1: ) Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token Next token is token ')' (1.1: ) Entering state 11 Next token is token ')' (1.1: ) @@ -112212,9 +103883,16 @@ $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 8 +Entering state 30 Reading a token Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token @@ -112230,13 +103908,37 @@ -> $$ = nterm exp (1.1: 1) Entering state 12 Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token '*' (1.1: ) syntax error -Error: popping nterm exp (1.1: 1) +Error: popping token '*' (1.1: ) +Error: popping nterm exp (1.1: 2) Shifting token error (1.1: ) Entering state 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token Next 
token is token ')' (1.1: ) Entering state 11 @@ -112252,10 +103954,10 @@ Reading a token Next token is token '=' (1.1: ) Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 3333) $2 = token '+' (1.1: ) $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 4444) Entering state 8 Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) @@ -112271,17 +103973,17 @@ Reading a token Next token is token '\n' (1.1: ) Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 4444) $2 = token '=' (1.1: ) $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 4444) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -112296,12 +103998,268 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) stderr: -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) +input: +input: +input: +input: +stderr: + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1390: $PREPARSER ./calc input +stderr: +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 + | 1 + 2 * 3 + !* ++ +stderr: +input: +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1397: $PREPARSER ./calc input +stderr: +input: +./calc.at:1392: cat stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +stderr: +stderr: +stdout: + | (* *) + (*) + (*) +1.6: syntax error: invalid character: '#' +1.2: syntax error +1.10: syntax error +1.16: syntax error + | (* *) + (*) + (*) +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1374: cat stderr +stderr: +./calc.at:1409: cat stderr +1.14: memory exhausted + | (- *) + (1 2) = 1 +./types.at:139: ./check +stderr: +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +1.14: memory exhausted +stderr: +./calc.at:1411: cat stderr +syntax error +syntax error +error: 2222 != 1 +input: +input: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: + | (- *) + (1 2) = 1 + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 +syntax error +syntax error +error: 2222 != 1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1403: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +./calc.at:1391: $PREPARSER ./calc input +stderr: +stderr: +syntax error +syntax error +syntax error +stderr: +1.14: memory exhausted +syntax error +syntax error +error: 2222 != 1 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +1.2: syntax error +1.10: syntax error +1.16: syntax error +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stderr: +input: +input: +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: cat stderr +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1408: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +stderr: +./calc.at:1409: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1411: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1371: cat stderr + | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 + !- ++ +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +1.14: memory exhausted +./calc.at:1390: $PREPARSER ./calc input +syntax error +syntax error +syntax error +stderr: +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+stderr: +stderr: +stderr: stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | (1 + # + 1) = 1111 stderr: -./calc.at:1405: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -112316,7 +104274,7 @@ Next token is token ')' (1.2: ) Shifting token ')' (1.2: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -112352,7 +104310,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -112371,7 +104329,7 @@ Entering state 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -112390,7 +104348,7 @@ Next token is token ')' (1.18: ) Shifting token ')' (1.18: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -112398,7 +104356,7 @@ Entering state 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -112430,7 +104388,7 @@ Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -112438,7 +104396,7 @@ Entering state 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -112473,7 +104431,7 @@ Entering state 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -112497,7 +104455,7 @@ Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -112505,7 +104463,7 @@ Entering state 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -112549,162 +104507,248 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -input: -1.11-17: error: null divisor -input: -1.11-17: error: null divisor -stderr: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error syntax error syntax error -syntax error -error: 4444 != 1 +error: 2222 != 1 +stderr: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token 
'(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | 1 + 2 * 3 + !* ++ -./calc.at:1411: $PREPARSER ./calc input -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -112717,92 +104761,48 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.6: syntax error: invalid character: '#' -./calc.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing 
stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +syntax error +syntax error +syntax error +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -1.11-17: error: null divisor -stdout: - -1.6: syntax error: invalid character: '#' -stderr: -stderr: +input: +input: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: cat stderr +./calc.at:1407: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1413: cat stderr +./calc.at:1400: cat stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1405: cat stderr ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./types.at:139: ./check -./calc.at:1407: "$PERL" -pi -e 'use strict; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1371: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) + | (!!) + (1 2) = 1 +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -112812,78 +104812,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1408: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +stderr: +./calc.at:1416: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Reading a token @@ -113131,32 +105079,13 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: cat stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1405: cat stderr -stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -stderr: -' expout || exit 77 +syntax error +syntax error +error: 2222 != 1 +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Starting parse Entering state 0 Reading a token @@ -113171,7 +105100,7 @@ Next token is token ')' (1.2: ) Shifting token ')' (1.2: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -113207,7 +105136,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -113226,7 +105155,7 @@ Entering state 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -113245,7 +105174,7 @@ Next token is token ')' (1.18: ) Shifting token ')' (1.18: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -113253,7 +105182,7 @@ Entering state 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -113285,7 +105214,7 @@ Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -113293,7 +105222,7 @@ Entering state 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -113328,7 +105257,7 @@ Entering state 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -113352,7 +105281,7 @@ Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -113360,7 +105289,7 @@ Entering state 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -113404,8 +105333,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1402: cat stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -113415,7 +105343,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1411: "$PERL" -pi -e 'use strict; +input: +input: +./calc.at:1401: cat stderr +input: +input: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -113425,7 +105361,169 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; 
+./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | 1 + 2 * 3 + !+ ++ +./calc.at:1414: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (#) + (#) = 2222 + | 1 + 2 * 3 + !- ++ +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1391: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1375: cat stderr +./calc.at:1405: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1407: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +./calc.at:1402: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token 
'\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.11-17: error: null divisor +./calc.at:1390: cat stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1397: cat stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -113436,6 +105534,127 @@ }eg ' expout || exit 77 stderr: + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1374: $PREPARSER ./calc input +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' 
(1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +stderr: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -113683,19 +105902,18 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: -./types.at:139: ./check -./calc.at:1398: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' input: -./calc.at:1408: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1407: cat stderr -./calc.at:1409: cat stderr -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1411: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -512. 
calc.at:1402: ok -./calc.at:1416: "$PERL" -pi -e 'use strict; +input: +input: +input: +input: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -113705,28 +105923,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.11-17: error: null divisor -./calc.at:1431: cat stderr -input: -input: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -509. calc.at:1398: | (1 + 1) / (1 - 1) - ok - | (!!) + (1 2) = 1 -./calc.at:1414: $PREPARSER ./calc input -input: -./calc.at:1408: $PREPARSER ./calc input -input: - | (* *) + (*) + (*) -input: +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (#) + (#) = 2222 -stderr: -./calc.at:1416: cat stderr -stderr: -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1413: "$PERL" -pi -e 'use strict; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -113736,14 +105943,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (!!) + (1 2) = 1 + | 1 + 2 * 3 + !+ ++ | (#) + (#) = 2222 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1409: $PREPARSER ./calc input -535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none ... -./calc.at:1448: mv calc.y.tmp calc.y - +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1398: cat stderr +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: cat stderr +stderr: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1397: $PREPARSER ./calc input +stderr: +stderr: + | 1 + 2 * 3 + !* ++ +stderr: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1390: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +1.11-17: error: null divisor +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -113758,7 +105995,7 @@ Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) Entering state 16 -Reducing stack 0 by rule 16 (line 116): +Reducing stack 0 by rule 16 (line 128): $1 = token '!' (1.2: ) $2 = token '!' 
(1.3: ) Shifting token error (1.2-3: ) @@ -113767,7 +106004,7 @@ Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -113803,7 +106040,7 @@ Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -113811,7 +106048,7 @@ Entering state 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -113856,227 +106093,13 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -1.11-17: error: null divisor -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = 
token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -error: 2222 != 1 stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token 1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token 1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) Starting parse Entering state 0 Reading a token @@ -114084,19 +106107,20 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.1: ) -syntax error +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' (1.1: ) Shifting token error (1.1: ) Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) Reading a token Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 Reducing stack 0 by rule 14 (line 119): @@ -114114,12 +106138,21 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.1: ) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.1: 2) syntax error +Error: popping nterm exp (1.1: 1) Shifting token error (1.1: ) Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -114133,52 +106166,38 @@ -> $$ = nterm exp (1.1: 1111) Entering state 30 Reading a token -Next token is token '+' (1.1: ) +Next token is token '=' (1.1: ) Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 1111) $2 = token '+' (1.1: ) $3 = nterm exp (1.1: 1111) -> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 Reading a token Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 7 (line 98): +Reducing stack 0 by rule 6 (line 82): $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 Next token is token '\n' (1.1: ) 
Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) + $1 = nterm exp (1.1: 2222) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -114192,13 +106211,6 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) - -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1413: cat stderr -./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -stderr: Starting parse Entering state 0 Reading a token @@ -114206,218 +106218,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 + $1 = nterm exp (1.7: 1) + $2 = token '+' 
(1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -stdout: -stderr: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 30 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering 
state 22 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -114430,7 +106458,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: ./check Starting parse Entering state 0 Reading a token @@ -114438,188 +106465,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token 
'+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -syntax error -error: 2222 != 1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1405: cat stderr - - | (!!) + (1 2) = 1 -Starting parse -Entering state 0 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token ')' (1.3: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -114632,11 +106705,69 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +input: +./calc.at:1387: cat stderr +./calc.at:1409: cat stderr +stderr: +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | (* *) + (*) + (*) +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1394: cat stderr +1.14: memory exhausted +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: input: +input: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1405: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !- ++ +./calc.at:1401: $PREPARSER ./calc input +stderr: +stderr: + | 1 + 2 * 3 + !+ ++ +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1397: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.14: memory exhausted Starting parse Entering state 0 Reading a token @@ -114644,212 +106775,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 Reading a token -Next token is token ')' (1.1: ) +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | (!!) + (1 2) = 1 -stderr: -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token ')' (1.3: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -114863,7 +107016,10 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1391: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -114873,7 +107029,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -514. calc.at:1405: ok +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: cat stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +input: +stderr: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +input: +./calc.at:1393: cat stderr + | (!!) + (1 2) = 1 +stderr: +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1392: cat stderr + | 1 + 2 * 3 + !+ ++ +stderr: + | (* *) + (*) + (*) +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1409: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -114888,7 +107097,7 @@ Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) Entering state 16 -Reducing stack 0 by rule 16 (line 116): +Reducing stack 0 by rule 16 (line 128): $1 = token '!' (1.2: ) $2 = token '!' (1.3: ) Shifting token error (1.2-3: ) @@ -114897,7 +107106,7 @@ Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -114933,7 +107142,7 @@ Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -114941,7 +107150,7 @@ Entering state 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -114985,19 +107194,35 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1374: cat stderr +stderr: +stdout: +./calc.at:1426: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1371: cat stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +input: +input: +./calc.at:1400: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1395: cat stderr ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -115008,7 +107233,75 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: cat stderr +./calc.at:1408: cat stderr +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !+ ++ +stderr: + | (* *) + (*) + (*) +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: $PREPARSER ./calc input +stderr: +input: +stderr: + | 1 + 2 * 3 + !* ++ +stderr: +stderr: +stderr: +./calc.at:1391: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +stderr: + | (* *) + (*) + (*) +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (- *) + (1 2) = 1 +./calc.at:1408: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -115120,28 +107413,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token @@ -115156,7 +107427,7 @@ Next token is token '!' 
(1.3: ) Shifting token '!' (1.3: ) Entering state 16 -Reducing stack 0 by rule 16 (line 116): +Reducing stack 0 by rule 16 (line 128): $1 = token '!' (1.2: ) $2 = token '!' (1.3: ) Shifting token error (1.2-3: ) @@ -115165,7 +107436,7 @@ Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -115201,7 +107472,7 @@ Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -115209,7 +107480,7 @@ Entering state 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -115253,17 +107524,18 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !* ++ +./calc.at:1374: $PREPARSER ./calc input +497. calc.at:1371: ok +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1408: cat stderr - - | (- *) + (1 2) = 1 -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1411: cat stderr -./calc.at:1431: cat stderr +stderr: input: -./calc.at:1407: cat stderr stderr: -./calc.at:1413: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !- ++ + | (1 + #) = 1111 +stderr: +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -115273,8 +107545,56 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1409: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1416: cat stderr +stderr: +memory exhausted +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted +stderr: +stderr: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +stderr: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1400: $PREPARSER ./calc input +syntax error +syntax error +syntax error +input: +stderr: +syntax error +syntax error +syntax error Starting parse Entering state 0 Reading a token @@ -115290,7 +107610,7 @@ 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.4: ) Entering state 9 -Reducing stack 0 by rule 15 (line 115): +Reducing stack 0 by rule 15 (line 127): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Shifting token error (1.2-4: ) @@ -115303,7 +107623,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -115339,7 +107659,7 @@ Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -115347,7 +107667,7 @@ Entering state 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -115391,11 +107711,10 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -516. calc.at:1408: ok -stderr: -stderr: -536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh" ... +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr Starting parse Entering state 0 Reading a token @@ -115507,99 +107826,65 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token +./calc.at:1414: cat stderr +stderr: + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./calc.at:1401: cat stderr 1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by 
rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 - | (1 + #) = 1111 -537. calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh" ... -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1413: cat stderr -./calc.at:1451: mv calc.y.tmp calc.y +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +syntax error +syntax error +syntax error + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +' expout || exit 77 +1.14: memory exhausted +stderr: +stdout: -./calc.at:1449: mv calc.y.tmp calc.y +./types.at:139: $PREPARSER ./test +525. calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ... +./calc.at:1433: mv calc.y.tmp calc.y -./calc.at:1431: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +input: +./calc.at:1403: cat stderr +./calc.at:1407: cat stderr +./calc.at:1375: cat stderr +./calc.at:1413: cat stderr +./calc.at:1390: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1392: $PREPARSER ./calc input +input: +stderr: +./calc.at:1402: cat stderr +stderr: +stderr: +stderr: +stderr: stderr: +stderr: +syntax error +syntax error +syntax error +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -115615,7 +107900,7 @@ 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.4: ) Entering state 9 -Reducing stack 0 by rule 15 (line 115): +Reducing stack 0 by rule 15 (line 127): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Shifting token error (1.2-4: ) @@ -115628,7 +107913,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -115664,7 +107949,7 @@ Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -115672,7 +107957,7 @@ Entering state 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -115716,11 +108001,63 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stdout: +memory exhausted + | 1 + 2 * 3 + !- ++ +./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1387: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: cat stderr +input: +input: +input: input: +input: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 + | 1 + 2 * 3 + !* ++ + | (!!) + (1 2) = 1 +./calc.at:1401: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1405: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1407: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input +stderr: +stderr: +stderr: +======== Testing with C++ standard flags: '' +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.14: memory exhausted +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -115728,64 +108065,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -115798,24 +108305,122 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: ./check -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Reading a token +Next token is token '*' (1.1: ) syntax error +Shifting token error (1.1: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.1: 2) syntax error +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) Starting parse Entering state 0 Reading a token @@ -115823,64 +108428,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token '!' (1.2: ) +Shifting token '!' 
(1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 
Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -115893,23 +108532,37 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !+ ++ -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stdout: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test - -stderr: -538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose ... +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; input: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1453: mv calc.y.tmp calc.y - -./calc.at:1414: "$PERL" -pi -e 'use strict; +input: +input: +input: +input: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -115919,6 +108572,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1433: $CXX $CPPFLAGS $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | 1 2 +./calc.at:1409: cat stderr +./calc.at:1426: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.14: memory exhausted +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted +stderr: + | (!!) + (1 2) = 1 + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !* ++ +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1395: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1402: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -115926,64 +108607,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -115996,83 +108847,356 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (1 + #) = 1111 +./calc.at:1375: $PREPARSER ./calc input +stderr: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting 
token "number" (1.1: 2) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +1.14: memory exhausted +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) Entering state 5 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -stderr: +Next token is token '!' (1.3: ) +Shifting token '!' 
(1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1416: cat stderr -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -error: 2222 != 1 -./calc.at:1448: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -./calc.at:1414: cat stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' 
(1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1398: cat stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116082,7 +109206,90 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: +./calc.at:1411: cat stderr +stderr: +stderr: + | (- *) + (1 2) = 1 +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stderr: +syntax error +1.14: memory exhausted +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +1.14: memory exhausted +input: +./calc.at:1401: cat stderr +input: +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: cat stderr +./calc.at:1387: cat stderr +./calc.at:1392: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1391: cat stderr +./calc.at:1403: $PREPARSER ./calc input +stderr: +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -116099,7 +109306,7 @@ 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.4: ) Entering state 9 -Reducing stack 0 by rule 15 (line 115): +Reducing stack 0 by rule 15 (line 127): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Shifting token error (1.2-4: ) @@ -116112,7 +109319,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -116148,7 +109355,7 @@ Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -116156,7 +109363,7 @@ Entering state 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -116200,74 +109407,134 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' 
(1.1: ) -Entering state 5 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1409: cat stderr +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !* ++ input: -./calc.at:1411: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +input: +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116277,20 +109544,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: - | (- *) + (1 2) = 1 -======== Testing with C++ standard flags: '' -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1411: cat stderr +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1408: cat stderr | 1 + 2 * 3 + !- ++ -./calc.at:1407: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1414: $PREPARSER ./calc input -stderr: +./calc.at:1402: $PREPARSER ./calc input + | (# + 1) = 1111 stderr: +./calc.at:1400: $PREPARSER ./calc input stderr: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error Starting parse Entering state 0 Reading a token @@ -116306,7 +109579,7 @@ 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.4: ) Entering state 9 -Reducing stack 0 by rule 15 (line 115): +Reducing stack 0 by rule 15 (line 127): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Shifting token error (1.2-4: ) @@ -116319,7 +109592,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -116355,7 +109628,7 @@ Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -116363,7 +109636,7 @@ Entering state 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -116407,7 +109680,83 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: +input: +input: +./calc.at:1405: cat stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +stderr: +stderr: +stderr: + | (#) + (#) = 2222 +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' + | 1 + 2 * 3 + !* ++ + | (#) + (#) = 2222 +./calc.at:1389: cat stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1401: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +memory exhausted +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.14: memory exhausted + | 1 + 2 * 3 + !* ++ +./calc.at:1393: cat stderr +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -116524,73 +109873,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1395: cat stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116600,79 +109884,72 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1414: cat stderr stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1394: $PREPARSER ./calc input +stdout: stderr: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -539. calc.at:1454: testing Calculator C++ %locations parse.lac=full parse.error=detailed ... -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1454: mv calc.y.tmp calc.y +./calc.at:1431: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc +1.2: syntax error: invalid character: '#' input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) +./calc.at:1390: cat stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: cat stderr +./calc.at:1397: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1416: cat stderr +./calc.at:1413: cat stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (* *) + (*) + (*) + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1408: $PREPARSER ./calc input +stderr: +input: +./calc.at:1403: cat stderr + | (#) + (#) = 2222 +./calc.at:1405: $PREPARSER ./calc input +input: +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -116695,7 +109972,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -116722,7 +109999,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -116730,7 +110007,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -116756,7 +110033,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -116764,7 +110041,7 @@ Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -116788,17 +110065,66 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (# + 1) = 1111 -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1393: $PREPARSER ./calc input +memory exhausted +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1431: $PREPARSER ./calc input +memory exhausted +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1407: cat stderr input: - | (# + 1) = 1111 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1449: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1375: cat stderr +input: +stderr: +stderr: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: cat stderr stderr: stderr: stderr: + | (- *) + (1 2) = 1 +stderr: +stderr: +stderr: +stderr: +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1390: $PREPARSER ./calc input +memory exhausted +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: Starting parse Entering state 0 Reading a token @@ -116806,56 +110132,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' 
(1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -116868,7 +110241,44 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +./calc.at:1402: cat stderr +stderr: +stdout: +./types.at:139: ./check +input: +input: +input: +input: +input: +input: +./calc.at:1400: cat stderr +input: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: +stderr: +./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (#) + (#) = 2222 +./calc.at:1374: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !- ++ + | (- *) + (1 2) = 1 + | (!!) + (1 2) = 1 +1.6: syntax error: invalid character: '#' +./calc.at:1416: $PREPARSER ./calc input + | (* *) + (*) + (*) + | 1 + 2 * 3 + !* ++ + | (#) + (#) = 2222 +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116878,6 +110288,254 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1395: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.14: memory exhausted +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' 
(1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -116994,10 +110652,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1451: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1431: cat stderr Starting parse Entering state 0 Reading a token @@ -117020,7 +110674,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -117047,7 +110701,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -117055,7 +110709,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -117081,7 +110735,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -117089,7 +110743,7 @@ Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -117120,71 +110774,141 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' 
(1.1: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +input: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (# + 1) = 1111 stderr: -./calc.at:1407: "$PERL" -pi -e 'use strict; + | (* *) + (*) + (*) + | 1 + 2 * 3 + !- ++ +input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117194,7 +110918,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ stderr: +stderr: +stderr: +stderr: +======== Testing with C++ standard flags: '' +stderr: +stderr: +memory exhausted +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.14: memory exhausted +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -117202,56 +110961,369 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: 
) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1401: cat stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: cat stderr +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +input: +stderr: +1.6: syntax error: invalid character: '#' +1.14: memory exhausted +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 
(line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -117264,8 +111336,10 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: cat stderr -input: + | 1 2 +1.2: syntax error: invalid character: '#' +./calc.at:1431: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -117273,56 +111347,214 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token ')' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 
= nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = 
nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -117335,10 +111567,22 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1453: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS - | (* *) + (*) + (*) -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1414: "$PERL" -pi -e 'use strict; +stderr: +memory exhausted +./calc.at:1394: cat stderr +stderr: +stderr: +stdout: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +./calc.at:1405: cat stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117348,13 +111592,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (* *) + (*) + (*) -stderr: -./calc.at:1407: cat stderr -./calc.at:1413: $PREPARSER ./calc input -stdout: -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117364,24 +111602,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1411: cat stderr +input: +input: +stderr: +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $PREPARSER ./test -syntax error -syntax error -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117392,10 +111629,19 @@ }eg ' expout || exit 77 stderr: -input: -syntax error -syntax error + | (#) + (#) = 2222 +./calc.at:1398: $PREPARSER ./calc input syntax error + | (1 + #) = 1111 +stderr: +./calc.at:1401: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +stderr: +1.14: memory exhausted +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -117418,7 +111664,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -117445,7 +111691,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -117453,7 +111699,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -117479,7 +111725,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -117487,7 +111733,7 @@ Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -117511,84 +111757,126 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1387: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1391: cat stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1408: cat stderr +stderr: +stderr: + | (* *) + (*) + (*) +./calc.at:1411: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +input: +input: +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 + | (1 + #) = 1111 | 1 + 2 * 3 + !* ++ +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +stderr: +syntax error +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: cat stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: cat stderr +./calc.at:1389: cat stderr +./calc.at:1393: cat stderr stderr: -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1409: cat stderr -./calc.at:1414: cat stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: -./calc.at:1416: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -117704,88 +111992,67 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -======== Testing with C++ standard flags: '' -./calc.at:1411: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +memory exhausted +syntax error input: +./calc.at:1400: cat stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + # + 1) = 1111 -stderr: - | 1 + 2 * 3 + !+ ++ +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1397: cat stderr input: -./calc.at:1409: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1414: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1390: cat stderr +./calc.at:1416: cat stderr +./calc.at:1413: cat stderr +./calc.at:1414: cat stderr + | (#) + (#) = 2222 + | (1 + #) = 1111 + | 1 + 2 * 3 + !+ ++ +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1408: $PREPARSER ./calc input stderr: stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +input: +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117795,90 +112062,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) input: +stderr: + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !* ++ + | (#) + (#) = 2222 +stdout: +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1393: $PREPARSER ./calc input +stderr: +stderr: + | (#) + (#) = 2222 +stderr: +stderr: +stderr: +./calc.at:1392: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1403: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +stderr: +stdout: +1.6: syntax error: invalid character: '#' +./types.at:139: ./check +syntax error Starting parse Entering state 0 Reading a token @@ -117915,14 +112123,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -117939,27 +112147,12 @@ Next token is token '+' (1.14: ) Shifting token '+' (1.14: ) Entering state 14 -Reducing stack 0 by rule 17 (line 117): +Reducing stack 0 by rule 17 (line 129): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1411: $PREPARSER ./calc input -stderr: -stderr: +memory exhausted Starting parse Entering state 0 Reading a token @@ -117967,70 +112160,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token Next token is token ')' (1.11: ) Entering state 11 Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -118043,6 +112268,96 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./types.at:139: $PREPARSER ./test +input: +input: +./calc.at:1374: cat stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: cat stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: +./calc.at:1395: cat stderr +./calc.at:1402: cat stderr + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1414: $PREPARSER ./calc input + | (# + 1) = 1111 + | (* *) + (*) + (*) +stderr: + | (1 + #) = 1111 +stdout: +./calc.at:1416: $PREPARSER ./calc input +stderr: +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1397: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1400: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./types.at:139: $PREPARSER ./test +memory exhausted +syntax error: invalid character: '#' +syntax error: invalid character: '#' +memory exhausted +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1401: cat stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: cat stderr +./calc.at:1409: cat stderr +stderr: +stderr: +stderr: +stderr: +======== Testing with C++ standard flags: '' +1.11-17: error: null divisor +stderr: +stderr: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -118158,18 +112473,9 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -118206,14 +112512,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -118230,12 +112536,11 @@ Next token is token '+' (1.14: ) Shifting token '+' (1.14: ) Entering state 14 -Reducing stack 0 by rule 17 (line 117): +Reducing stack 0 by rule 17 (line 129): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1413: cat stderr Starting parse Entering state 0 Reading a token @@ -118243,162 +112548,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1431: cat stderr -stderr: -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> 
$$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -118526,73 +112772,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -118602,83 +112782,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1407: cat stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: stderr: -./types.at:139: ./check -./calc.at:1409: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' + | (#) + (#) = 2222 +./calc.at:1402: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +memory exhausted +memory exhausted input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1416: "$PERL" -pi -e 'use strict; +input: +input: +input: +./calc.at:1431: cat stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -118688,82 +112816,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | (#) + (#) = 2222 -./calc.at:1407: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1411: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" @@ -118771,10 +112829,31 @@ }eg ' expout || exit 77 input: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1398: cat stderr + | (1 + #) = 1111 + | 1 + 2 * 3 + !+ ++ stderr: -./calc.at:1431: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) + | 1 + 2 * 3 + !+ ++ + | (#) + (#) = 2222 +./calc.at:1409: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: $PREPARSER ./calc input +stderr: + | (1 + # + 1) = 1111 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1375: $PREPARSER ./calc input +stderr: +stderr: +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -118782,103 +112861,120 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token ')' (1.1: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token ')' (1.1: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: cat stderr -stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1414: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +1.11-17: error: null divisor +input: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -118888,7 +112984,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1 + 2 * 3 + !- ++ + | (# + 1) = 1111 +stderr: +./calc.at:1408: $PREPARSER ./calc input +stderr: +stderr: +stderr: +./calc.at:1405: $PREPARSER ./calc input +stderr: +stderr: +stderr: +syntax error: invalid character: '#' stderr: Starting parse Entering state 0 @@ -118896,74 +113004,68 @@ Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 
21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) Entering state 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: -input: -input: - | 1 + 2 * 3 + !- ++ - | 1 + 2 * 3 + !+ ++ -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1416: $PREPARSER ./calc input +Reducing stack 0 by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -118971,199 +113073,101 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token ')' (1.1: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token ')' (1.1: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 +Entering state 11 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -119177,76 +113181,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (1 + 1) / (1 - 1) -./calc.at:1411: $PREPARSER ./calc input -stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -119283,14 +113218,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -119307,15 +113242,11 @@ Next token is token '+' (1.14: ) Shifting token '+' (1.14: ) Entering state 14 -Reducing stack 0 by rule 17 (line 117): +Reducing stack 0 by rule 17 (line 129): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: -input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -119323,219 +113254,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = 
nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -119549,12 +113363,47 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !* ++ - | 1 + 2 * 3 + !- ++ +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ + | 1//2 + | (1 + #) = 1111 ./calc.at:1431: $PREPARSER ./calc input -./calc.at:1414: $PREPARSER ./calc input +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1411: $PREPARSER ./calc input stderr: stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -119591,14 +113440,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -119615,14 +113464,110 @@ Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 13 -Reducing stack 0 by rule 18 (line 118): +Reducing stack 0 by rule 18 (line 130): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +1.2: syntax error: invalid character: '#' +stderr: +stderr: +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check +stderr: +stdout: +./calc.at:1432: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1400: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: cat stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: cat stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stderr: stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -119688,124 +113633,75 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token 
"number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +syntax error +1.6: syntax error: invalid character: '#' +syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -119842,14 +113738,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -119863,28 +113759,32 @@ Shifting token '!' 
(1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - | 1 + 2 * 3 + !- ++ -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +syntax error: invalid character: '#' +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119894,8 +113794,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1391: cat stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119905,7 +113805,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: cat stderr +stderr: +stderr: stderr: +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -119942,14 +113849,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -119966,23 +113873,20 @@ Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 13 -Reducing stack 0 by rule 18 (line 118): +Reducing stack 0 by rule 18 (line 130): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +510. 
calc.at:1400: ok + +input: +input: +input: +input: +./calc.at:1387: cat stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119993,6 +113897,19 @@ }eg ' expout || exit 77 ./calc.at:1413: cat stderr + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !- ++ + | (#) + (#) = 2222 +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc input +stderr: + | error +stderr: +stderr: +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1426: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -120050,20 +113967,51 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1409: cat stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: cat stderr +1.6: syntax error: invalid character: '#' +stdout: +syntax error +./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./types.at:139: $PREPARSER ./test +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1397: cat stderr +./calc.at:1390: cat stderr +./calc.at:1401: cat stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: cat stderr +./calc.at:1393: cat stderr +./calc.at:1414: cat stderr +./calc.at:1416: cat stderr stderr: -./calc.at:1407: cat stderr +stderr: +input: +input: +input: Starting parse Entering state 0 Reading a token @@ -120100,14 +114048,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -120124,30 +114072,95 @@ Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 13 -Reducing stack 0 by rule 18 (line 118): +Reducing stack 0 by rule 18 (line 130): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1431: cat stderr -input: -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -517. calc.at:1409: | 1 + 2 * 3 + !* ++ - ok -./calc.at:1413: $PREPARSER ./calc input -518. calc.at:1411: ok +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1416: "$PERL" -pi -e 'use strict; +stderr: + | (# + 1) = 1111 + | (1 + #) = 1111 +stderr: +./calc.at:1391: $PREPARSER ./calc input +syntax error +./calc.at:1403: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1411: $PREPARSER ./calc input +stderr: +./calc.at:1408: cat stderr +stdout: +./types.at:139: ./check +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120157,7 +114170,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: cat stderr Starting parse Entering state 0 Reading a token @@ -120194,14 +114208,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -120215,30 +114229,81 @@ Shifting token '!' 
(1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1414: cat stderr input: - | (#) + (#) = 2222 -./calc.at:1416: cat stderr - | (1 + #) = 1111 -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1431: $PREPARSER ./calc input +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1405: cat stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: input: +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +526. calc.at:1434: testing Calculator C++ %locations api.location.type={Span} ... +input: +input: +input: + | (#) + (#) = 2222 + | 1 + 2 * 3 + !+ ++ + | (# + 1) = 1111 + | (1 + # + 1) = 1111 | (#) + (#) = 2222 + | (1 + # + 1) = 1111 +input: +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: $PREPARSER ./calc input ./calc.at:1414: $PREPARSER ./calc input stderr: stderr: +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1393: $PREPARSER ./calc input stderr: - +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1375: cat stderr +1.2: syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ + | 1 2 +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1413: $PREPARSER ./calc input +stderr: + | (* *) + (*) + (*) +stderr: +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1401: $PREPARSER ./calc input + | (1 + #) = 1111 +syntax error: invalid character: '#' +syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -120296,17 +114361,709 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1395: cat stderr +./calc.at:1402: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1434: mv calc.y.tmp calc.y + +syntax error +stdout: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1433: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stdout: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./types.at:139: ./check +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 + | 1 + 2 * 3 + !* ++ +stderr: +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.3: syntax error +stderr: +stdout: +input: +./types.at:139: ./check + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1433: $PREPARSER ./calc input +input: +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 + | (1 + #) = 1111 + | (1 + 1) / (1 - 1) + | (1 + # + 1) = 1111 +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: cat stderr +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1398: cat stderr +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1375: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): $1 = token '!' 
(1.13: ) $2 = token '*' (1.14: ) 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +stderr: +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +stderr: +stderr: +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +syntax error: invalid character: '#' syntax error: invalid character: '#' +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1409: cat stderr +1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +stderr: +stderr: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +1.11-17: error: null divisor Starting parse Entering state 0 Reading a token @@ -120314,82 +115071,1599 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +./calc.at:1401: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: + | (# + 1) = 1111 +./calc.at:1398: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1413: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1374: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1411: cat stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1407: cat stderr +input: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1394: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1414: $PREPARSER ./calc input +stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1409: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +1.11-17: error: null divisor +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.2: syntax error: invalid character: '#' +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +./calc.at:1433: $PREPARSER ./calc input +stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1407: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) + | 1 + 2 * 3 + !* ++ +./calc.at:1411: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1431: $PREPARSER ./calc input +stderr: +./calc.at:1391: cat stderr +./calc.at:1426: cat stderr +./calc.at:1432: cat stderr +./calc.at:1401: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1416: cat stderr +./calc.at:1390: cat stderr +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 124): + $1 = token '!' 
(1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.11-17: error: null divisor +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.3: syntax error +stderr: +stdout: +./types.at:139: $PREPARSER ./test +input: +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: cat stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: cat stderr +./calc.at:1408: cat stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr + | 1//2 +./calc.at:1432: $PREPARSER ./calc input + | (1 + #) = 1111 + | 1 = 2 = 3 +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 
2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +syntax error +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 Reading a token Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 124): + $1 = token '!' 
(1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1387: cat stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +./calc.at:1397: cat stderr +./calc.at:1375: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1416: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: + | (1 + 1) / (1 - 1) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (1 + # + 1) = 1111 +1.3: syntax error +./calc.at:1391: $PREPARSER ./calc input +syntax error +syntax error syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) +./calc.at:1390: $PREPARSER ./calc input +stderr: +======== Testing with C++ standard flags: '' +stdout: +stderr: +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: ./check +1.11-17: error: null divisor +stderr: +1.3: syntax error +input: +input: +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1393: cat stderr +input: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1395: cat stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 + | (# + 1) = 1111 +stderr: + | (#) + (#) = 2222 +./calc.at:1403: $PREPARSER ./calc input +stderr: +./calc.at:1408: $PREPARSER ./calc input +stderr: + | (1 + #) = 1111 +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1389: $PREPARSER ./calc input +1.11-17: error: null divisor +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1413: cat stderr +stderr: +stderr: +stderr: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.3: syntax error +1.2: syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.1: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack 0 by 
rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (# + 1) = 1111 +./calc.at:1387: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +' expout || exit 77 +./calc.at:1392: cat stderr +input: +input: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1398: cat stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: cat stderr +input: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +stderr: + | 123 +input: +./calc.at:1375: $PREPARSER ./calc --num input +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: $PREPARSER ./calc input + | (1 + # + 1) = 1111 + | (1 + #) = 1111 + | (# + 1) = 1111 +./calc.at:1397: $PREPARSER ./calc input +stderr: +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1395: $PREPARSER ./calc input +syntax error: invalid character: '#' +stderr: +stderr: +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.11-17: error: null divisor +1.2: syntax error: invalid character: '#' +./calc.at:1414: cat stderr +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +input: +./calc.at:1374: cat stderr +input: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: cat stderr +./calc.at:1411: cat stderr +stderr: + | (# + 1) = 1111 +stderr: +stderr: +stderr: +./calc.at:1392: $PREPARSER ./calc input stderr: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !* ++ +./calc.at:1413: $PREPARSER ./calc input +stderr: +1.11-17: error: null divisor +1.6: syntax error: invalid character: '#' +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Reading a token @@ -120409,7 +116683,7 @@ Next token is token ')' (1.3: ) Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -120436,7 +116710,7 @@ Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -120444,7 +116718,7 @@ Entering state 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -120487,11 +116761,78 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (#) + (#) = 2222 + | 1 + 2 * 3 + !* ++ +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + # + 1) = 1111 +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1409: $PREPARSER ./calc input +input: +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1433: cat stderr +syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ ./calc.at:1416: $PREPARSER ./calc input -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -120558,9 +116899,83 @@ 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - + | (1 + # + 1) = 1111 +./calc.at:1374: $PREPARSER ./calc input stderr: -./calc.at:1413: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +stderr: +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120571,89 +116986,93 @@ }eg ' expout || exit 77 stderr: +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.11-17: error: null divisor +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 +Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 +Next token is token '+' (1.11: 
) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +511. calc.at:1401: ok stderr: -./calc.at:1413: cat stderr Starting parse Entering state 0 Reading a token @@ -120673,7 +117092,7 @@ Next token is token ')' (1.3: ) Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -120700,7 +117119,7 @@ Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -120708,7 +117127,7 @@ Entering state 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -120751,8 +117170,129 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stdout: +./types.at:139: ./check + +syntax error: invalid character: '#' +527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose ... +./calc.at:1435: mv calc.y.tmp calc.y + +./calc.at:1407: cat stderr +input: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1432: cat stderr + | 1//2 + | (#) + (#) = 2222 +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1433: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Reading a token @@ -120819,19 +117359,14 @@ 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -======== Testing with C++ standard flags: '' -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1390: cat stderr +input: +./calc.at:1426: cat stderr +input: +./calc.at:1431: cat stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120841,7 +117376,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120851,10 +117386,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1413: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1403: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120864,10 +117397,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr + | 1 + 2 * 3 + | error + | (1 + # + 1) = 1111 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1375: $PREPARSER ./calc --num input +./calc.at:1402: $PREPARSER ./calc input +stderr: +stderr: stderr: -./calc.at:1431: cat stderr -./calc.at:1407: cat stderr Starting parse Entering state 0 Reading a token @@ -120965,104 +117503,74 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: cat stderr -input: -input: - | (1 + #) = 1111 - | (1 + #) = 1111 -input: -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: $PREPARSER ./calc input -540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace ... 
- | (# + 1) = 1111 -./calc.at:1455: mv calc.y.tmp calc.y - -./calc.at:1407: $PREPARSER ./calc input -input: -stderr: -stderr: - | (#) + (#) = 2222 -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1416: $PREPARSER ./calc input +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1405: cat stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -121082,7 +117590,7 @@ Next token is token ')' (1.3: ) Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -121109,7 +117617,7 @@ Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -121117,7 +117625,7 @@ Entering state 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -121160,84 +117668,40 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.3: syntax error +440. types.at:139: ok +input: +input: +502. calc.at:1390: ./calc.at:1387: cat stderr +./calc.at:1394: cat stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 stderr: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -541. calc.at:1457: testing Calculator C++ parse.error=custom ... stderr: -./calc.at:1457: mv calc.y.tmp calc.y - +stderr: +./calc.at:1407: $PREPARSER ./calc input +input: +input: +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -121248,6 +117712,45 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1431: $PREPARSER ./calc input + ok +./calc.at:1391: cat stderr +./calc.at:1435: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1408: cat stderr + | + | +1 +./calc.at:1426: $PREPARSER ./calc input +1.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '+', expecting end of file + | (1 + # + 1) = 1111 +./calc.at:1403: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1397: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -121345,9 +117848,23 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + +syntax error + Starting parse Entering state 0 Reading a token @@ -121362,11 +117879,32 @@ Next token is token error (1.1: ) Error: discarding token error (1.1: ) Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -121378,32 +117916,226 @@ $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 8 +Entering state 30 Reading a token Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +514. calc.at:1405: ok +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +528. 
calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose ... +529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose ... +./calc.at:1437: mv calc.y.tmp calc.y + +./calc.at:1438: mv calc.y.tmp calc.y + +./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1389: cat stderr +' expout || exit 77 +input: +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: cat stderr +input: +./calc.at:1414: cat stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1391: $PREPARSER ./calc input + | (1 + # + 1) = 1111 + | (# + 1) = 1111 +./calc.at:1387: $PREPARSER ./calc input +stderr: +./calc.at:1394: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +1.1: syntax error +./calc.at:1416: cat stderr +stderr: +1.3: syntax error, unexpected '+', expecting end of file +530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose ... 
+./calc.at:1440: mv calc.y.tmp calc.y + +./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1393: cat stderr +./calc.at:1395: cat stderr +input: +input: +stderr: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1397: $PREPARSER ./calc input +1.11-17: error: null divisor + | (1 + #) = 1111 +stderr: +stderr: +syntax error: invalid character: '#' +./calc.at:1408: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -121417,6 +118149,61 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +input: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ 
/\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: cat stderr +stderr: +./calc.at:1398: cat stderr +input: +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 + | 1 + 2 * 3 + !* ++ +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1416: $PREPARSER ./calc input +stderr: +input: + | (1 + # + 1) = 1111 +./calc.at:1395: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1389: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -121450,7 +118237,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -121494,9 +118281,43 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error: invalid character: '#' -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1392: cat stderr +1.11-17: error: null divisor +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: cat stderr +./calc.at:1411: cat stderr +syntax error +stderr: +stderr: +stderr: +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -121595,118 +118416,159 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: cat stderr -input: -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1416: cat stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +syntax error: invalid character: '#' +1.11-17: error: null divisor Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -121719,14 +118581,34 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr -./calc.at:1431: cat stderr +stderr: +stdout: +syntax error: invalid character: '#' +./types.at:139: $PREPARSER ./test +input: +input: input: +./calc.at:1438: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: cat stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (# + 1) = 1111 -./calc.at:1414: $PREPARSER ./calc input + | (1 + # + 1) = 1111 + | (1 + 1) / (1 - 1) +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1402: cat stderr +./calc.at:1440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: -./calc.at:1407: cat stderr input: + | (1 + 1) / (1 - 1) +./calc.at:1398: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -121760,7 +118642,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -121804,6 +118686,58 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | (1 + #) = 1111 + | (1 + #) = 1111 +stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: +stderr: +./calc.at:1411: $PREPARSER ./calc input +stderr: +./calc.at:1409: $PREPARSER ./calc input +syntax error: invalid character: '#' +stderr: +error: null divisor +stderr: stderr: Starting parse Entering state 0 @@ -121819,49 +118753,77 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' 
(1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -121874,79 +118836,170 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + #) = 1111 -input: -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1431: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +1.11-17: error: null divisor +1.11-17: error: null divisor Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line 
(1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -121959,7 +119012,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: +======== Testing with C++ standard flags: '' stderr: Starting parse Entering state 0 @@ -121968,25 +119021,33 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -122030,112 +119091,34 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + # + 1) = 1111 -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1413: "$PERL" -pi -e 'use strict; +stderr: +stdout: +./calc.at:1434: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1457: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -syntax error: invalid character: '#' -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -stderr: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: cat stderr -syntax error: invalid character: '#' -./calc.at:1414: "$PERL" -pi -e 'use strict; +input: +./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122145,6 +119128,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1433: cat stderr + | 1 + 2 * 3 +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1402: $PREPARSER ./calc input +stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -122222,10 +119213,357 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -stdout: -./types.at:139: ./check +./calc.at:1375: $PREPARSER ./calc --exp input +error: null divisor +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +input: +1.11-17: error: null divisor + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1407: cat stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +' expout || exit 77 +./calc.at:1391: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1403: cat stderr +' expout || exit 77 +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +stderr: +1.11-17: error: null divisor +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1431: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1408: cat stderr + | 1 = 2 = 3 +stderr: + | error +stderr: +stderr: +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1432: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +503. calc.at:1391: ok +input: +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + +stderr: +stderr: + | (1 + 1) / (1 - 1) +1.1: syntax error +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1394: cat stderr +1.7: syntax error +./calc.at:1395: cat stderr +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: cat stderr + | (1 + #) = 1111 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: cat stderr +./calc.at:1387: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1413: cat stderr +stderr: +stderr: +stderr: +1.11-17: error: null divisor +1.11-17: error: null divisor +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1426: cat stderr +./calc.at:1389: cat stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -122254,11 +119592,228 @@ Next token is token error (1.1: ) Error: discarding token error (1.1: ) Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) + | + | +1 +stderr: +./calc.at:1431: $PREPARSER ./calc input +499. 
calc.at:1375: ok +1.7: syntax error +1.1: syntax error +input: +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +input: +input: +input: +./calc.at:1409: cat stderr + | (1 + 1) / (1 - 1) +input: + | (1 + #) = 1111 +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 2 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1397: cat stderr + | (#) + (#) = 2222 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1434: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 +./calc.at:1398: cat stderr +./calc.at:1394: $PREPARSER ./calc input +input: + | (1 + 1) / (1 - 1) +input: + | (1 + #) = 1111 +./calc.at:1414: $PREPARSER ./calc input +syntax error + | (# + 1) = 1111 +./calc.at:1413: $PREPARSER ./calc input +stderr: +./calc.at:1408: $PREPARSER ./calc input +stderr: +./calc.at:1387: $PREPARSER ./calc input + +1.11-17: error: null divisor +stderr: +./calc.at:1426: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 
+Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -122309,23 +119864,835 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1414: cat stderr +509. calc.at:1398: | (1 + # + 1) = 1111 +./calc.at:1389: $PREPARSER ./calc input input: -./calc.at:1416: "$PERL" -pi -e 'use strict; +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: + | (# + 1) = 1111 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ok +error: null divisor +./calc.at:1409: $PREPARSER ./calc input +error: null divisor +stderr: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.3: syntax error +./calc.at:1392: cat stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +syntax error: invalid character: '#' +531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose ... +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1393: cat stderr +./calc.at:1374: cat stderr +./calc.at:1441: mv calc.y.tmp calc.y + +508. 
calc.at:1397: ok +syntax error: invalid character: '#' +syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1455: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: cat stderr + +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +input: +error: null divisor +stderr: +stderr: +syntax error: invalid character: '#' +error: null divisor +input: + | 123 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1374: $PREPARSER ./calc --num input +syntax error: invalid character: '#' +1.3: syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | (1 + 1) / (1 - 1) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +512. calc.at:1402: ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ok + +syntax error +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122336,23 +120703,236 @@ }eg ' expout || exit 77 input: -./calc.at:1413: $PREPARSER ./calc input +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1392: $PREPARSER ./calc input +stderr: +error: null divisor | (1 + # + 1) = 1111 -./calc.at:1414: $PREPARSER ./calc input +./calc.at:1393: $PREPARSER ./calc input +input: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1403: cat stderr +532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1443: mv calc.y.tmp calc.y + +./calc.at:1433: cat stderr +./calc.at:1432: cat stderr +513. calc.at:1403: ok +./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + + +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: cat stderr +./calc.at:1414: cat stderr +stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1416: cat stderr +533. calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1445: mv calc.y.tmp calc.y + +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +error: null divisor +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 = 2 = 3 +./calc.at:1433: $PREPARSER ./calc input +./types.at:139: ./check +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; stderr: + | + | +1 +./calc.at:1408: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1407: cat stderr stderr: -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1431: cat stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error: invalid character: '#' +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1409: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +1.7: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: Starting parse Entering state 0 Reading a token @@ -122422,17 +121002,191 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1434: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc + | (1 + #) = 1111 + | (# + 1) = 1111 +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1416: $PREPARSER ./calc input +input: +2.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: $PREPARSER ./calc /dev/null +535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none ... +input: + | (# + 1) = 1111 +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1395: cat stderr +534. calc.at:1446: testing Calculator C++ %header %locations parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+ | (1 + # + 1) = 1111 +./calc.at:1408: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.7: syntax error +input: +stderr: +./calc.at:1434: cat stderr +stderr: +./calc.at:1446: mv calc.y.tmp calc.y + + | (1 + # + 1) = 1111 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1409: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1448: mv calc.y.tmp calc.y + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: cat stderr + | 1 + 2 * 3 +./calc.at:1374: $PREPARSER ./calc --num input +2.1: syntax error +stderr: Starting parse Entering state 0 Reading a token @@ -122472,7 +121226,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -122516,12 +121270,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1431: cat stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: Starting parse Entering state 0 Reading a token @@ -122529,18 +121277,26 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -122591,8 +121347,25 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: | (# + 1) = 1111 -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1407: $PREPARSER ./calc input +stdout: +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -122632,7 +121405,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -122676,41 +121449,98 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1407: cat stderr -input: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1389: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +syntax error +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +./calc.at:1387: cat stderr +./calc.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./types.at:139: $PREPARSER ./test + | 1//2 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: $PREPARSER ./calc input ./calc.at:1434: $PREPARSER ./calc input -stdout: - | (1 + # + 1) = 1111 -./calc.at:1431: $PREPARSER ./calc input -stderr: stderr: -./calc.at:1433: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -Starting parse +507. calc.at:1395: Starting parse Entering state 0 Reading a token Next token is token '(' (1.1: ) @@ -122779,57 +121609,9 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -input: - | (1 + 1) / (1 - 1) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: cat stderr -stderr: -stderr: -stderr: -./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stdout: +536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh" ... 
+./calc.at:1449: mv calc.y.tmp calc.y + Starting parse Entering state 0 Reading a token @@ -122899,6 +121681,37 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1392: cat stderr +./calc.at:1411: cat stderr +stderr: +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: cat stderr +stderr: +input: +500. calc.at:1387: stderr: +stderr: + ok +syntax error + ok +stderr: +stderr: + | (1 + 1) / (1 - 1) +error: null divisor +stderr: +syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.3: syntax error +./calc.at:1389: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -122906,101 +121719,55 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 +Error: discarding token '+' (1.1: ) Reading a token Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 +Error: discarding token "number" (1.1: 1) Reading a token Next token is token ')' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): +Entering state 26 +Reducing stack 0 by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) + $2 = token error (1.1: ) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp 
(1.1: 0) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 Reading a token Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -123015,13 +121782,7 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) stderr: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -stderr: -./types.at:139: $PREPARSER ./test -input: -./calc.at:1413: cat stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123031,9 +121792,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; Starting parse Entering state 0 Reading a token @@ -123041,138 +121821,97 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Reading a token -Next token is 
token ')' (1.1: ) -Reducing stack 0 by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: - | 1 2 -./calc.at:1434: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1414: $PREPARSER ./calc input -stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error -./calc.at:1416: cat stderr + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: stderr: ./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Reading a token @@ -123192,90 +121931,136 @@ Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -123288,36 +122073,101 @@ Entering state 17 Cleanup: popping token "end of 
input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +error: null divisor +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1414: cat stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +input: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: cat stderr stderr: +./calc.at:1393: cat stderr + | (1 + # + 1) = 1111 +504. calc.at:1392: ./calc.at:1411: $PREPARSER ./calc input +./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.3: syntax error +./calc.at:1443: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS + ok +./calc.at:1432: cat stderr +stderr: +./calc.at:1433: cat stderr +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +error: null divisor +' expout || exit 77 +' expout || exit 77 +./calc.at:1445: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + input: -======== Testing with C++ standard flags: '' -input: +441. types.at:139: ok | (1 + # + 1) = 1111 -./calc.at:1416: $PREPARSER ./calc input -stdout: - | 1 2 -./calc.at:1433: $PREPARSER ./calc input +input: +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (1 + 1) / (1 - 1) | (1 + # + 1) = 1111 ./calc.at:1413: $PREPARSER ./calc input -./calc.at:1431: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stderr: -./calc.at:1432: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1432: $PREPARSER ./calc /dev/null +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123327,6 +122177,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: + +stderr: Starting parse Entering state 0 Reading a token @@ -123410,19 +122264,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -input: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -123525,90 +122366,58 @@ Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = 
nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -123621,29 +122430,84 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error - | (1 + 1) / (1 - 1) -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + +537. calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh" ... +./calc.at:1451: mv calc.y.tmp calc.y + +./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace ... +539. calc.at:1454: testing Calculator C++ %locations parse.lac=full parse.error=detailed ... +./calc.at:1455: mv calc.y.tmp calc.y + +./calc.at:1454: mv calc.y.tmp calc.y + +./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose ... +input: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: cat stderr +./calc.at:1449: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1408: cat stderr +./calc.at:1416: cat stderr + | + | +1 +./calc.at:1409: cat stderr ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1433: $PREPARSER ./calc input +stderr: +./calc.at:1453: mv calc.y.tmp calc.y + stderr: +stderr: +error: null divisor +stderr: +1.1: syntax error +./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +2.1: syntax error input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1407: cat stderr +input: +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | (1 + 1) / (1 - 1) + | (1 + 1) / (1 - 1) +./calc.at:1434: cat stderr +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1409: $PREPARSER ./calc input + | (# + 1) = 1111 + | (1 + # + 1) = 1111 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1407: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -123727,15 +122591,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1434: cat stderr -stderr: -stderr: -error: null divisor -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error Starting parse Entering state 0 Reading a token @@ -123819,22 +122674,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -515. 
calc.at:1407: input: - ok - | 1//2 -./calc.at:1434: $PREPARSER ./calc input -stderr: -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123844,15 +122684,92 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1374: cat stderr stderr: -stderr: -1.3: syntax error -error: null divisor -./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: cat stderr -./calc.at:1416: cat stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123862,42 +122779,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.1: syntax error stderr: -1.3: syntax error -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: - - | 1 2 -./calc.at:1432: $PREPARSER ./calc input -input: -./calc.at:1413: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1416: $PREPARSER ./calc input -520. calc.at:1414: stderr: -./calc.at:1433: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - ok -1.3: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1431: cat stderr +error: null divisor + | error +./calc.at:1434: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -123926,7 +122814,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -123935,7 +122823,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -123971,7 +122859,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -123980,7 +122868,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -123988,7 +122876,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): +Reducing stack 0 by rule 10 (line 108): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -124013,23 +122901,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error -./calc.at:1433: $PREPARSER ./calc input -input: stderr: Starting parse Entering state 0 @@ -124038,6 +122911,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: cat stderr +stderr: +stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +2.1: syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token Next token is token "number" (1.2: 1) Shifting token "number" (1.2: 1) Entering state 1 @@ -124059,7 +123024,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -124068,7 +123033,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -124104,7 +123069,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -124113,7 +123078,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -124121,7 +123086,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): +Reducing stack 0 by rule 10 (line 108): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -124146,14 +123111,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + 1) / (1 - 1) -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1434: cat stderr -stderr: -stderr: -stderr: -1.3: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -124182,7 +123139,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -124191,7 +123148,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -124227,7 +123184,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -124236,7 +123193,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -124244,7 +123201,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): +Reducing stack 0 by rule 10 (line 108): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -124269,35 +123226,87 @@ 
Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: cat stderr ./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: - | error -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1437: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - stderr: -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124308,10 +123317,105 @@ }eg ' expout || exit 77 stderr: -1.3: syntax error -stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -542. calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1426: cat stderr Starting parse Entering state 0 Reading a token @@ -124340,7 +123444,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -124349,7 +123453,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -124385,7 +123489,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -124394,7 +123498,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -124402,7 +123506,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): +Reducing stack 0 by rule 10 (line 108): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -124427,17 +123531,15 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: - -1.1: syntax error -523. calc.at:1431: ./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./calc.at:1458: mv calc.y.tmp calc.y - +stderr: +./calc.at:1389: cat stderr input: -./calc.at:1416: cat stderr - ok -./calc.at:1433: "$PERL" -pi -e 'use strict; + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1 + 2 * 3 +./calc.at:1431: $PREPARSER ./calc input +stderr: +506. calc.at:1394: ok +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124447,27 +123549,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1437: $PREPARSER ./calc input -1.1: syntax error -stderr: -521. calc.at:1416: ok -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1413: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124477,13 +123560,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: cat stderr -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: cat stderr -./calc.at:1413: cat stderr - -input: -./calc.at:1434: "$PERL" -pi -e 'use strict; +1.1: syntax error +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124493,36 +123571,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -./calc.at:1432: $PREPARSER ./calc input -stderr: -input: -519. 
calc.at:1413: ok -stderr: -./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | error -./calc.at:1433: $PREPARSER ./calc input - -1.3: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full ... -./calc.at:1459: mv calc.y.tmp calc.y - -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -1.1: syntax error - | 1 2 -stderr: -1.3: syntax error -./calc.at:1437: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: $PREPARSER ./calc --exp input +./calc.at:1414: cat stderr stderr: -./calc.at:1433: "$PERL" -pi -e 'use strict; +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: cat stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124532,12 +123587,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error, unexpected number -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -544. calc.at:1468: testing Calculator glr.cc ... -./calc.at:1468: mv calc.y.tmp calc.y - -./calc.at:1432: "$PERL" -pi -e 'use strict; +501. calc.at:1389: ok +input: +./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124547,96 +123599,129 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1432: cat stderr stderr: -1.3: syntax error, unexpected number -./calc.at:1433: cat stderr stderr: -stdout: -545. calc.at:1469: testing Calculator glr2.cc ... -./calc.at:1437: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 + | (!!) 
+ (1 2) = 1 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1433: cat stderr +442. types.at:139: ./calc.at:1455: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + ok +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1458: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1440: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - +./calc.at:1433: $PREPARSER ./calc /dev/null my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1469: mv calc.y.tmp calc.y - -./calc.at:1432: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: - | error -./calc.at:1432: $PREPARSER ./calc input -stderr: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -1.7: syntax error -stderr: -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: cat stderr -546. calc.at:1476: testing Calculator C++ %glr-parser ... -./calc.at:1476: mv calc.y.tmp calc.y - -./calc.at:1434: cat stderr -stderr: -1.1: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: -stderr: -1.7: syntax error -1.1: syntax error - | 1//2 -./calc.at:1437: $PREPARSER ./calc input -input: - | 1 = 2 = 3 stderr: -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1433: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; +input: +517. calc.at:1409: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-./calc.at:1459: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1432: "$PERL" -pi -e 'use strict; +./calc.at:1454: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: + | (1 + 1) / (1 - 1) + ok +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 + + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124646,281 +123731,2228 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: cat stderr +input: +./calc.at:1414: $PREPARSER ./calc input +input: +./calc.at:1393: cat stderr +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1451: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS + | (1 + 1) / (1 - 1) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1411: $PREPARSER ./calc input stderr: stderr: -1.7: syntax error +./calc.at:1432: $PREPARSER ./calc input Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + 
$2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error +error: 2222 != 1 + +./calc.at:1416: cat stderr +./calc.at:1408: cat stderr +stderr: +stderr: +stderr: +./calc.at:1434: cat stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +505. 
calc.at:1393: Starting parse +Entering state 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm 
exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) +Now at end of input. 
+Shifting token "end of input" (2.1: ) Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + ok + | (1 + 1) / (1 - 1) +./calc.at:1407: $PREPARSER ./calc input +Starting parse +Entering state 0 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm 
exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +498. calc.at:1374: ./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + +1.1: syntax error +stderr: +input: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + ok +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +516. calc.at:1408: | 1 = 2 = 3 +syntax error +error: 2222 != 1 + ok +./calc.at:1434: $PREPARSER ./calc input +stderr: + +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 
1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' 
(1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: 
null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: + | (1 + # + 1) = 1111 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) 
+Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.7: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.7: syntax error +./calc.at:1433: cat stderr + +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) 
+-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: cat stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +stderr: +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1413: cat stderr + +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: 
) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1431: cat stderr +518. calc.at:1411: ok +541. calc.at:1457: testing Calculator C++ parse.error=custom ... +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1457: mv calc.y.tmp calc.y + +./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1433: $PREPARSER ./calc input +input: +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1432: cat stderr +542. calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full ... +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + | (!!) + (1 2) = 1 +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: mv calc.y.tmp calc.y + +stderr: +stderr: +syntax error +error: 2222 != 1 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1458: mv calc.y.tmp calc.y + + +./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: cat stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +519. calc.at:1413: ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ok +input: + | (!!) + (1 2) = 1 +./calc.at:1432: $PREPARSER ./calc input +545. calc.at:1469: testing Calculator glr2.cc ... +./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +546. calc.at:1476: testing Calculator C++ %glr-parser ... +547. calc.at:1476: testing Calculator glr2.cc ... +544. calc.at:1468: testing Calculator glr.cc ... +./calc.at:1426: cat stderr +stderr: +syntax error +error: 2222 != 1 +./calc.at:1434: cat stderr +./calc.at:1476: mv calc.y.tmp calc.y + +515. calc.at:1407: ok +./calc.at:1476: mv calc.y.tmp calc.y + +./calc.at:1469: mv calc.y.tmp calc.y +./calc.at:1468: mv calc.y.tmp calc.y + + +./calc.at:1416: cat stderr +./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +input: +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1414: cat stderr + | + | +1 +./calc.at:1434: $PREPARSER ./calc input +stderr: +input: + +./calc.at:1426: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1416: $PREPARSER ./calc input + +548. calc.at:1477: testing Calculator C++ %glr-parser %locations ... +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +stderr: +2.1: syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = 
token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +520. calc.at:1414: syntax error +syntax error +error: 2222 != 1 + ok +./calc.at:1477: mv calc.y.tmp calc.y + +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error +./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1459: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +./calc.at:1457: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next 
token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error +syntax error +error: 2222 != 1 +./calc.at:1433: cat stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1458: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + +./calc.at:1431: cat stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stdout: + | (- *) + (1 2) = 1 +input: +./calc.at:1431: $PREPARSER ./calc input +550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span} ... + | (!!) + (1 2) = 1 +./types.at:139: $PREPARSER ./test +./calc.at:1478: mv calc.y.tmp calc.y + +./calc.at:1433: $PREPARSER ./calc input +549. calc.at:1477: testing Calculator glr2.cc %locations ... 
+./calc.at:1476: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1416: cat stderr +stderr: +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1477: mv calc.y.tmp calc.y +stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr +./calc.at:1432: cat stderr +./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +521. calc.at:1416: ./calc.at:1468: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + ok +./calc.at:1426: cat stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./calc.at:1434: $PREPARSER ./calc /dev/null +syntax error +syntax error +error: 2222 != 1 +1.11: syntax error +1.1-16: error: 2222 != 1 +439. types.at:139: ok +551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span} ... +./calc.at:1478: mv calc.y.tmp calc.y + +./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + + +552. calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose ... +./calc.at:1479: mv calc.y.tmp calc.y + +./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose ... +./calc.at:1479: mv calc.y.tmp calc.y + +./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +./calc.at:1478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: +./calc.at:1477: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | (* *) + (*) + (*) +./calc.at:1426: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1432: $PREPARSER ./calc input +stderr: +1.1: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +syntax error +syntax error +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +syntax error +syntax error +syntax error +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +stdout: +./calc.at:1437: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1431: cat stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (- *) + (1 2) = 1 +./calc.at:1433: $PREPARSER ./calc input +stderr: +./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +./calc.at:1434: cat stderr + | (* *) + (*) + (*) +./calc.at:1431: $PREPARSER ./calc input +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1432: cat stderr +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +stderr: +stdout: +./calc.at:1438: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +input: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr +input: + | 1 2 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1438: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1440: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: + | (* *) + (*) + (*) +./calc.at:1432: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +stderr: +stderr: +1.3: syntax error, unexpected number +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1440: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack 
by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) 
+Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) @@ -125667,13 +126699,10 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1441: "$PERL" -ne ' +./calc.at:1435: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -125682,12 +126711,33 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc + )' calc.cc calc.hh -./calc.at:1433: cat stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1426: $PREPARSER ./calc input stderr: -1.7: syntax error stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1435: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -126705,82 +127755,6 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1432: cat stderr -./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -./calc.at:1433: $PREPARSER ./calc input -stderr: -input: -2.1: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1437: cat stderr -stderr: - | 1 2 -stderr: -./calc.at:1440: $PREPARSER ./calc input -2.1: syntax error -1.7: syntax error -input: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1434: $PREPARSER ./calc input -stderr: -./calc.at:1468: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: stderr: Starting parse Entering state 0 @@ -127799,12 +128773,42 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error -input: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1453: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stderr: +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -128822,9 +129826,99 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -2.1: syntax error +./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.3: syntax error, unexpected number +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1453: $PREPARSER ./calc input +stderr: +input: +input: + | 1 2 +./calc.at:1440: $PREPARSER ./calc input +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error + | 1 2 +./calc.at:1438: $PREPARSER ./calc input +stderr: +stdout: +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. +stdout: +stderr: +./calc.at:1468: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +stdout: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stdout: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -128845,22 +129939,11 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1437: $PREPARSER ./calc input -stderr: input: -./calc.at:1434: "$PERL" -pi -e 'use strict; +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: cat stderr Starting parse Entering state 0 Stack now 0 @@ -128881,13 +129964,35 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 - | 1 2 +./calc.at:1433: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +443. types.at:139: | 1 + 2 * 3 + !- ++ + ok +./calc.at:1441: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. +input: +./calc.at:1437: cat stderr stderr: -./calc.at:1441: $PREPARSER ./calc input -1.1: syntax error, unexpected invalid token -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: $PREPARSER ./calc /dev/null -./calc.at:1432: "$PERL" -pi -e 'use strict; +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -128897,11 +130002,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1426: $PREPARSER ./calc input + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +input: +./calc.at:1468: $PREPARSER ./calc input stderr: + | 1 2 +input: +input: stderr: -1.1: syntax error +./calc.at:1434: cat stderr +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -1.1: syntax error, unexpected invalid token + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1454: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -128922,22 +130069,23 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: cat stderr -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +input: +./calc.at:1459: $PREPARSER ./calc input stderr: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: $PREPARSER ./calc /dev/null stderr: Starting parse Entering state 0 @@ -128959,35 +130107,67 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -1.1: syntax error -./calc.at:1432: cat stderr + | (* *) + (*) + (*) stderr: -1.1: syntax error -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: cat stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1433: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected number +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +./calc.at:1441: $PREPARSER ./calc input +input: +input: +stderr: + | (!!) + (1 2) = 1 + +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1434: $PREPARSER ./calc input +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: cat stderr + | 1 2 +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1453: $PREPARSER ./calc input my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1437: cat stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; +stderr: +stderr: +1.3: syntax error, unexpected number +stdout: +./calc.at:1458: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -128997,31 +130177,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1432: $PREPARSER ./calc input stderr: -1.1: syntax error -./calc.at:1441: cat stderr +./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + stderr: -2.1: syntax error input: -input: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1440: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1437: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 stderr: stderr: -2.1: syntax error -stdout: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1440: cat stderr stderr: +stdout: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: cat stderr +./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -./calc.at:1438: "$PERL" -ne ' + | 1 2 +./calc.at:1445: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. +stderr: (eof || $. == 1) && /^\s*$/ # No trailing space. || /\s$/ @@ -129029,38 +130217,12 @@ || /\t/ )' calc.cc -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -1.7: syntax error, unexpected '=' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 input: -./calc.at:1434: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -129070,12 +130232,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1433: cat stderr -stderr: -1.7: syntax error, unexpected '=' -input: +1.3: syntax error, unexpected number | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -129089,63 +130246,7 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1438: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
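Note: the recurring "sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr" steps replay each test's captured stderr file onto the testsuite's own stderr while deleting libgcov's "profiling: ...: Merge mismatch for summaries" warnings, evidently so that coverage-instrumented builds do not leak that noise into the output being compared. As a standalone sketch (assumes a file literally named "stderr" in the test directory, which is what the suite passes):

  # Echo the saved stderr back to fd 2, minus gcov profiling warnings.
  sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr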
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 +./calc.at:1458: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -130164,20 +131265,17 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: "$PERL" -pi -e 'use strict; +554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose ... +./calc.at:1480: mv calc.y.tmp calc.y + +./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130187,38 +131285,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1433: $PREPARSER ./calc input + | 1 2 +./calc.at:1438: cat stderr +./calc.at:1459: $PREPARSER ./calc input +stdout: stderr: stderr: -./calc.at:1437: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 + | 1 + 2 * 3 + !+ ++ +./calc.at:1432: $PREPARSER ./calc input +stderr: +./calc.at:1457: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + Starting parse Entering state 0 Stack now 0 @@ -131236,15 +132322,35 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +syntax error +input: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1445: $PREPARSER ./calc input stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +input: +input: +./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: stdout: -./calc.at:1445: "$PERL" -ne ' +stdout: + | 1//2 + | 1 2 +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -131255,16 +132361,32 @@ || /\t/ )' calc.cc -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1432: cat stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1440: $PREPARSER ./calc input input: -./calc.at:1434: cat stderr -./calc.at:1432: $PREPARSER ./calc /dev/null - | - | +1 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1441: "$PERL" -pi -e 'use strict; +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +1.3: syntax error, unexpected number + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131275,27 +132397,7 @@ }eg ' expout || exit 77 input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -./calc.at:1440: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131305,18 +132407,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1445: $PREPARSER ./calc input -1.1: syntax error input: -input: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -stderr: -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -131327,10 +132418,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1440: cat stderr + | 1//2 +./calc.at:1435: cat stderr + | (#) + (#) = 2222 +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc input stderr: -./calc.at:1441: cat stderr +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1457: $PREPARSER ./calc input +1.3: syntax error, unexpected number +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: Starting parse Entering state 0 @@ -131346,16 +132460,19 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
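Note: the "$PERL" -ne one-liner that keeps reappearing (run over calc.cc, and in some tests calc.cc calc.hh) is a whitespace lint on the Bison-generated sources: it prints "file:line: {text}" for any leading or trailing blank line, any line with trailing whitespace, and any line containing a tab, so an empty result means the generated files are clean. Reconstructed as a standalone sketch, with plain perl in place of the suite's $PERL variable:

  perl -ne '
    chomp;
    print "$ARGV:$.: {$_}\n"
      if (# No starting/ending empty lines.
          (eof || $. == 1) && /^\s*$/
          # No trailing space.
          || /\s$/
          # No tabs.
          || /\t/
         )' calc.cc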
+Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -1.1: syntax error -2.1: syntax error, unexpected '+' -stderr: -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -132374,19 +133491,59 @@ Cleanup: popping token end of input () Cleanup: popping nterm input () stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +syntax error input: - | error -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1477: $PREPARSER ./calc input +input: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 2 ./calc.at:1441: $PREPARSER ./calc input - | error +stderr: +stderr: +stderr: +stderr: +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) Starting parse Entering state 0 Stack now 0 @@ -132401,42 +133558,21 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1433: cat stderr -stderr: stderr: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -133454,134 +134590,81 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +input: +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1437: cat stderr + | 1 2 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: $PREPARSER ./calc input stderr: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -stdout: -./calc.at:1435: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -input: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | (!!) 
+ (1 2) = 1 -./calc.at:1432: cat stderr +stderr: +stderr: +1.3: syntax error, unexpected number Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1437: cat stderr -input: - | 1 2 -./calc.at:1445: $PREPARSER ./calc input -stderr: -./calc.at:1438: cat stderr -stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -./calc.at:1437: $PREPARSER ./calc /dev/null +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (2) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1434: cat stderr -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1426: "$PERL" -ne ' + | 1//2 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: $PREPARSER ./calc input +stderr: +stdout: +stderr: +stdout: +./calc.at:1449: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -133592,40 +134675,20 @@ || /\t/ )' calc.cc calc.hh -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -1.1: syntax error, unexpected end of input -./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1440: "$PERL" -pi -e 'use strict; +./calc.at:1478: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1434: cat stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -133635,52 +134698,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1433: cat stderr input: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1441: cat stderr - | 1//2 -./calc.at:1438: $PREPARSER ./calc input +./calc.at:1453: cat stderr +./calc.at:1459: cat stderr + | 1 + 2 * 3 + !- ++ stderr: +./calc.at:1432: $PREPARSER ./calc input stderr: - | (!!) + (1 2) = 1 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token number (2) -Stack now 0 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1440: cat stderr stderr: stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 stderr: Starting parse Entering state 0 @@ -133709,8 +134736,14 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
input: -./calc.at:1433: "$PERL" -pi -e 'use strict; +input: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -133720,65 +134753,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -1.1: syntax error, unexpected end of input - | 1 = 2 = 3 | 1 2 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error stderr: -1.3: syntax error, unexpected number +./calc.at:1476: $PREPARSER ./calc input stderr: -./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1437: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -133794,67 +134775,50 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
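Note: the "$EGREP -c -v 'Return for a new token:|LAC:' stderr" steps count how many lines of the saved stderr remain once "Return for a new token:" chatter and LAC (lookahead correction) diagnostics are ignored; presumably the calc.at tests compare that count across parser variants. Spelled out without the suite's $EGREP variable:

  # Print the number of stderr lines that are neither token-return
  # nor LAC trace chatter.
  grep -E -c -v 'Return for a new token:|LAC:' stderr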
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +input: +input: +input: +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1433: cat stderr input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) @@ -133862,21 +134826,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1440: $PREPARSER ./calc input +input: +input: +input: +./calc.at:1480: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | 1 + 2 * 3 + !- ++ + | (- *) + (1 2) = 1 + | 1 2 + | 1//2 +./calc.at:1431: $PREPARSER ./calc input + | 1 2 +./calc.at:1445: $PREPARSER ./calc input + | 1//2 + | 1 + 2 * 3 + !+ ++ +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1433: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected number -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: "$PERL" -pi -e 'use strict; +./calc.at:1457: $PREPARSER ./calc input +stderr: +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1459: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+1.1: syntax error, unexpected invalid token +syntax error +input: +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -133886,14 +134864,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: cat stderr | 1 2 -./calc.at:1426: $PREPARSER ./calc input -stderr: - | (- *) + (1 2) = 1 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -133903,49 +134878,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr +stderr: +stderr: +stderr: +stderr: +./calc.at:1454: cat stderr +stderr: +stderr: +stderr: +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token number (2) Stack now 0 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -133955,90 +134927,96 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1434: "$PERL" -pi -e 'use strict; +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: cat stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: cat stderr +stderr: +stderr: +1.1: syntax error, unexpected invalid token +stderr: +1.3: syntax error +./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1441: cat stderr ./calc.at:1438: cat stderr +./calc.at:1432: cat stderr + | 1//2 + | 1 2 +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input + | 1//2 +stderr: +stderr: +./calc.at:1468: $PREPARSER ./calc input +stderr: +stderr: +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token number (2) Stack now 0 -input: -./calc.at:1432: cat stderr -./calc.at:1441: cat stderr -./calc.at:1437: cat stderr -./calc.at:1435: cat stderr - | 1//2 -./calc.at:1445: $PREPARSER ./calc input -stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 - | error +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
stderr: -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1440: "$PERL" -pi -e 'use strict; +input: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +input: +input: + | error +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134048,38 +135026,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1434: cat stderr -syntax error stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '/' () -Stack now 0 +./calc.at:1440: $PREPARSER ./calc input stderr: -input: +stderr: + | 1//2 + | 1 + 2 * 3 + !- ++ +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.3: syntax error +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error Starting parse Entering state 0 Stack now 0 @@ -134088,19 +135050,13 @@ 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 +stderr: +stderr: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' input: -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -./calc.at:1440: cat stderr - | 1//2 - | (!!) + (1 2) = 1 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | - | +1 -./calc.at:1433: "$PERL" -pi -e 'use strict; +./calc.at:1426: cat stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134110,54 +135066,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1441: $PREPARSER ./calc input -stderr: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: cat stderr +./calc.at:1437: cat stderr +input: +input: + | 1 2 stderr: +input: + | error stderr: stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
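Note: the "$PERL" -pi scripts ending in "expout || exit 77" rewrite the expected-output file expout from the "syntax error on token [X] (expected: [...])" wording used by some calc.at variants into Bison's standard "syntax error, unexpected X, expecting ..." messages; if the rewrite fails, the test exits with status 77, which Autotest treats as "skipped". The nearby "1.1-46: error: 4444 != 1" style lines, by contrast, appear to be the calculator's own value checks after error recovery rather than Bison diagnostics. The rewrite, reconstructed as a standalone sketch with plain perl:

  perl -pi -e 'use strict;
    s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
     {
       my $unexp = $1;
       my @exps = $2 =~ /\[(.*?)\]/g;
       ($#exps && $#exps < 4)
         ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
         : "syntax error, unexpected $unexp";
     }eg
  ' expout || exit 77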
-1.1-46: error: 4444 != 1 -input: - | (- *) + (1 2) = 1 -./calc.at:1434: $PREPARSER ./calc input -input: -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '/' () -Stack now 0 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1449: $PREPARSER ./calc input + | (#) + (#) = 2222 + | 1//2 +./calc.at:1432: $PREPARSER ./calc input +syntax error +./calc.at:1459: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134167,15 +135094,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1440: $PREPARSER ./calc input -stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; stderr: Starting parse Entering state 0 @@ -134185,47 +135107,15 @@ 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: $PREPARSER ./calc input stderr: -./calc.at:1433: cat stderr -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134236,85 +135126,48 @@ }eg ' expout || exit 77 stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1445: "$PERL" -pi -e 'use strict; +input: +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +input: +./calc.at:1453: cat stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: cat stderr -input: -./calc.at:1435: "$PERL" -pi -e 'use strict; +' expout || exit 77 + | 1 = 2 = 3 + | error +1.3: syntax error +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134324,46 +135177,62 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (* *) + (*) + (*) stderr: -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1437: cat stderr +stderr: +1.3: syntax error +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +./calc.at:1457: cat stderr +stdout: stdout: -./calc.at:1445: cat stderr -./calc.at:1435: cat stderr stderr: -./calc.at:1454: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | error + | error +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1434: cat stderr +input: +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -input: - | (!!) + (1 2) = 1 -./calc.at:1437: $PREPARSER ./calc input +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1455: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. (eof || $. == 1) && /^\s*$/ # No trailing space. 
|| /\s$/ @@ -134371,11 +135240,22 @@ || /\t/ )' calc.cc -input: -./calc.at:1438: cat stderr - | error -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1441: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1454: cat stderr + | (1 + #) = 1111 +./calc.at:1426: $PREPARSER ./calc input +1.3: syntax error +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134386,7 +135266,10 @@ }eg ' expout || exit 77 stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +1.7: syntax error, unexpected '=' +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134396,12 +135279,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; +./calc.at:1476: cat stderr +./calc.at:1477: cat stderr +./calc.at:1431: cat stderr +./calc.at:1445: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr +input: +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134411,11 +135296,64 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: stderr: + | 1//2 stderr: -./calc.at:1441: $PREPARSER ./calc /dev/null +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./calc.at:1440: cat stderr +input: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +447. types.at:139: ok stderr: +input: + | 1//2 +./calc.at:1476: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected invalid token +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -134429,14 +135367,16 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1454: $PREPARSER ./calc input +stderr: 1.1: syntax error, unexpected invalid token -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1440: "$PERL" -pi -e 'use strict; +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1455: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134446,114 +135386,151 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + | error +stderr: +./calc.at:1458: cat stderr +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: input: +./calc.at:1454: $PREPARSER ./calc input + | (* *) + (*) + (*) + | 1//2 +syntax error: invalid character: '#' +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +syntax error +./calc.at:1434: $PREPARSER ./calc input | 1//2 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1438: $PREPARSER ./calc input - | error -./calc.at:1445: $PREPARSER ./calc input stderr: +input: +./types.at:139: $PREPARSER ./test stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 +./calc.at:1468: cat stderr +./calc.at:1477: $PREPARSER ./calc input +1.7: syntax error, unexpected '=' +./calc.at:1445: $PREPARSER ./calc input +1.1: syntax error, unexpected invalid token +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1433: $PREPARSER ./calc input stderr: +input: stderr: stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +input: + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + + | (#) + (#) = 2222 +1.2: syntax error +1.10: syntax error +1.16: syntax error + | 1 = 2 = 3 1.1: syntax error, unexpected invalid token syntax error -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: $PREPARSER ./calc input stderr: -./calc.at:1434: cat stderr -./calc.at:1432: cat stderr +stderr: +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: cat stderr +input: +stderr: +input: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + | error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '/' () Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected invalid token +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1459: cat stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: $PREPARSER ./calc input stderr: -./calc.at:1440: cat stderr -stderr: -syntax error stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -134593,62 +135570,1188 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -input: -input: -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: cat stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1438: cat stderr +stderr: +stderr: +input: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error +1.1: syntax error, unexpected invalid token +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: cat stderr +stderr: +stderr: +445. 
types.at:139: | 1//2 + ok input: -./calc.at:1435: "$PERL" -pi -e 'use strict; +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +./calc.at:1453: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - | (* *) + (*) + (*) my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +stderr: +stderr: +stderr: + | 1//2 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R9 G29 R7 G8 S18 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a 
token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 +Reducing stack by rule 11 (line 115): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 105): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input 
(1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G10 R11 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 115): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R11 G12 
S26 +Reducing stack by rule 11 (line 115): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (5.8: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +LAC: initial context established for '=' +LAC: checking 
lookahead '=': R11 G10 R11 G10 R11 G8 S18 +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +LAC: initial context established for '-' +LAC: checking lookahead '-': R8 G8 S19 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting 
token '-' (9.7: ) +LAC: initial context discarded due to shift +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 19 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 19 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 +Reading a token +Next token is token ')' (10.11: ) +LAC: initial context established for ')' +LAC: checking lookahead 
')': R8 G12 S26 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.5: 3) +-> $$ = nterm 
exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G32 R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 116): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R12 G12 S26 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack 
now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (14.1: ) +Shifting token end of file (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: $PREPARSER ./calc input - | 1 2 -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1440: $PREPARSER ./calc /dev/null -stderr: -stderr: -stderr: +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1478: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -1.3: syntax error, unexpected number -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1437: cat stderr -./calc.at:1433: cat stderr -./calc.at:1438: "$PERL" -pi -e 'use strict; +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134658,34 +136761,56 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +1.3: syntax error +./types.at:139: $PREPARSER ./test stderr: +./calc.at:1432: cat stderr stderr: +syntax error stderr: +input: +1.3: syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Stack now 0 8 +Error: popping nterm exp (1) Stack now 0 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; +Cleanup: discarding lookahead token '/' () +Stack now 0 +./calc.at:1435: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error -1.10: syntax error -1.16: syntax error -1.3: syntax error, unexpected number -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134695,59 +136820,1161 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1433: $PREPARSER ./calc input +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: cat stderr +input: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | 1 = 2 = 3 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -stderr: -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R9 G29 R7 G8 S18 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) 
+Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 +Reducing stack by rule 11 (line 115): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 105): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.15: 5) +-> $$ = nterm exp 
(2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G10 R11 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 115): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> 
$$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R11 G12 S26 +Reducing stack by rule 11 (line 115): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (5.8: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (5.1-10: 1) + $2 = 
token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token 
+Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +LAC: initial context established for '-' +LAC: checking lookahead '-': R8 G8 S19 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +LAC: initial context discarded due to shift +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-9.0: ) + 
$2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 19 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 19 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 +Reading a token +Next token is token ')' (10.11: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next 
token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G32 R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 116): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 
92): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R12 G12 S26 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (14.1: ) +Shifting token end of file (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) + | error +./calc.at:1457: $PREPARSER ./calc input input: -./calc.at:1441: cat stderr - | 1 = 2 = 3 -./calc.at:1454: "$PERL" -pi -e 'use strict; +./calc.at:1455: $EGREP -c -v 'Return 
for a new token:|LAC:' stderr +stderr: +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1438: cat stderr -./calc.at:1426: cat stderr +./calc.at:1454: cat stderr +input: +./calc.at:1459: $PREPARSER ./calc input +1.3: syntax error stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; +stderr: + | 1 = 2 = 3 + +./calc.at:1453: $PREPARSER ./calc input +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | (1 + #) = 1111 +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.3: syntax error +./calc.at:1434: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ' expout || exit 77 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1432: "$PERL" -pi -e 'use strict; + | error +./calc.at:1426: cat stderr +./calc.at:1440: cat stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134757,40 +137984,50 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 + | 1 = 2 = 3 +input: 1.7: syntax error, unexpected '=' +stderr: +input: +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1433: cat stderr +1.3: syntax error input: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr input: -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: cat stderr - | error +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; | | +1 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1426: $PREPARSER ./calc input -input: - | 1//2 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1437: $PREPARSER ./calc input + | 1 = 2 = 3 +' expout || exit 77 ./calc.at:1454: $PREPARSER ./calc input -stderr: -stderr: -stdout: -stderr: -./calc.at:1432: cat stderr -./calc.at:1440: cat stderr +./calc.at:1435: $PREPARSER ./calc input +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./types.at:139: $PREPARSER ./test -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134800,54 +138037,110 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1434: cat stderr + | 1 = 2 = 3 +stderr: + | 1 2 input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1445: cat stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +stderr: stderr: input: -stdout: -./calc.at:1453: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - | 1 + 2 * 3 + !- ++ -./calc.at:1433: $PREPARSER ./calc input - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -./calc.at:1441: $PREPARSER ./calc input +451. 
types.at:139: ok + | + | +1 +1.7: syntax error, unexpected '=' +stderr: +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1477: cat stderr stderr: + | error +2.1: syntax error, unexpected '+' +./calc.at:1476: $PREPARSER ./calc input +input: stderr: stderr: +1.6: syntax error: invalid character: '#' 1.7: syntax error, unexpected '=' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: cat stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: cat stderr + | (1 + #) = 1111 +stderr: +555. calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose ... +stderr: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error, unexpected '=' input: -syntax error - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +stderr: +stderr: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1480: mv calc.y.tmp calc.y + + | error +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1468: cat stderr Starting parse Entering state 0 Stack now 0 @@ -134873,321 +138166,668 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1453: $PREPARSER ./calc input -input: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (* *) + (*) + (*) - | 1 + 2 * 3 + !+ ++ -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +448. 
types.at:139: | (# + 1) = 1111 stderr: -./calc.at:1434: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 +Next token is token number (1.3: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +2.1: syntax error, unexpected '+' + ok +1.7: syntax error, unexpected '=' +./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +stderr: +./calc.at:1459: cat stderr +stderr: +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1449: cat stderr +./calc.at:1426: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +1.7: syntax error, unexpected '=' +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +syntax error +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; 
+ my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 20 4 12 20 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 20 4 12 20 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: + | error +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +Stack now 0 +./calc.at:1477: $PREPARSER ./calc input +stderr: +stderr: +syntax error: invalid character: '#' +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +input: +stderr: + +input: + | (1 + #) = 1111 +input: +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1431: $PREPARSER ./calc input +' expout || exit 77 + | + | +1 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token number (1.3: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 + | error +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1458: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1478: cat stderr +stderr: +stderr: +stderr: +./calc.at:1449: $PREPARSER ./calc input +syntax error +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' + +1.1: syntax error +stderr: +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +syntax error +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +input: +./calc.at:1457: cat stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: cat stderr +./calc.at:1453: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1432: cat stderr +556. calc.at:1482: testing Calculator C++ %glr-parser %debug ... + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +Stack now 0 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1482: mv calc.y.tmp calc.y + + | error +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +./calc.at:1440: cat stderr +./calc.at:1438: cat stderr +stderr: +1.1: syntax error +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1437: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +1.1: syntax error +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +input: +./calc.at:1455: cat stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.1: syntax error +./calc.at:1435: cat stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1441: cat stderr +stderr: +./calc.at:1457: $PREPARSER ./calc input +stderr: +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +syntax error: invalid character: '#' + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1440: $PREPARSER ./calc /dev/null +1.1: syntax error, unexpected end of input + | + | +1 +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: $PREPARSER ./calc input +stderr: +stderr: +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.1: syntax error +557. calc.at:1482: testing Calculator glr2.cc %debug ... +./calc.at:1482: mv calc.y.tmp calc.y + +input: +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose ... +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 + | 1//2 +stdout: +2.1: syntax error, unexpected '+' +./calc.at:1455: $PREPARSER ./calc input +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./types.at:139: $PREPARSER ./test +input: + | + | +1 +stderr: +./calc.at:1458: cat stderr + | + | +1 +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error, unexpected end of input +stderr: +./calc.at:1485: mv calc.y.tmp calc.y + +./calc.at:1433: cat stderr +1.2: syntax error: invalid character: '#' +stderr: +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) + | + | +1 +input: +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 = 2 = 3 +stderr: +./calc.at:1441: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 +Next token is token '/' (1.3: ) +LAC: initial context established for '/' +LAC: checking lookahead '/': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +2.1: syntax error, unexpected '+' +stderr: +2.1: syntax error, unexpected '+' +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: cat stderr +input: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1482: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | (# + 1) = 1111 +./calc.at:1433: $PREPARSER ./calc input + | + | +1 +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1445: cat stderr +./calc.at:1426: cat stderr +./calc.at:1476: cat stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: cat stderr +stderr: +stderr: +stderr: +stderr: +2.1: syntax error, unexpected '+' +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 20 4 12 21 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +450. types.at:139: Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -135197,51 +138837,472 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of inpinput: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1435: "$PERL" -pi -e 'use strict; +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 + ok +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1434: cat stderr + +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1440: $PREPARSER ./calc input my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -ut (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +LAC: initial context established for '/' +LAC: checking lookahead '/': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error, unexpected '+' stderr: -./calc.at:1432: $PREPARSER ./calc input -syntax error +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 stderr: +./calc.at:1477: cat stderr +./calc.at:1468: cat stderr +2.1: syntax error, unexpected '+' +./calc.at:1457: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1 = 2 = 3 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1445: $PREPARSER ./calc input +stderr: +./calc.at:1459: $PREPARSER ./calc /dev/null +./calc.at:1437: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1426: $PREPARSER ./calc input +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.2: syntax error: invalid character: '#' +input: +input: +input: + | 1 = 2 = 3 + | 1 = 2 = 3 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +input: +' expout || exit 77 +./calc.at:1477: $PREPARSER ./calc input + | (#) + (#) = 2222 + | 1 = 2 = 3 +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1476: $PREPARSER ./calc input +446. types.at:139: ok +./calc.at:1431: cat stderr +./calc.at:1432: cat stderr +stderr: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: cat stderr +./calc.at:1478: cat stderr +stderr: +./calc.at:1440: cat stderr +./calc.at:1435: cat stderr +stderr: +stderr: +input: +stderr: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +input: +' expout || exit 77 +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: ./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +1.7: syntax error + | + | +1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Stack now 0 8 18 +Error: popping token '=' () +Stack now 0 8 +Error: popping nterm exp (1) +Stack now 0 +Cleanup: discarding lookahead token '=' () +Stack now 0 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | + | +1 +./calc.at:1453: $PREPARSER ./calc /dev/null +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1457: $PREPARSER ./calc input +syntax error: invalid character: '#' +input: +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: cat stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | (# + 1) = 1111 +./calc.at:1431: $PREPARSER ./calc input +stderr: +stderr: +559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose ... +stderr: +stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1485: mv calc.y.tmp calc.y + +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: + +stderr: +stderr: +syntax error +1.1: syntax error, unexpected end of input +./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1435: $PREPARSER ./calc /dev/null +560. calc.at:1486: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" %verbose ... +./calc.at:1486: mv calc.y.tmp calc.y + +./calc.at:1458: cat stderr +input: +input: +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1485: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +syntax error +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + # + 1) = 1111 +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1454: cat stderr + | 1 = 2 = 3 +./calc.at:1432: $PREPARSER ./calc input +1.7: syntax error +syntax error: invalid character: '#' +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1455: cat stderr +./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./calc.at:1458: $PREPARSER ./calc /dev/null +stderr: +stderr: +1.1: syntax error, unexpected end of input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 
5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Stack now 0 8 18 +Error: popping token '=' () +Stack now 0 8 +Error: popping nterm exp (1) +Stack now 0 +Cleanup: discarding lookahead token '=' () +Stack now 0 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1438: $PREPARSER ./calc /dev/null +syntax error: invalid character: '#' +stderr: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1441: cat stderr +./calc.at:1448: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stderr: +stderr: stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.1: syntax error, unexpected end of input Starting parse Entering state 0 Stack now 0 @@ -135552,58 +139613,54 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of inpstderr: -./calc.at:1435: cat stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -ut (2.1: ) +Next token is token end of input (2.1: ) Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +1.6: syntax error: invalid character: '#' +./calc.at:1454: $PREPARSER ./calc /dev/null +stderr: +syntax error +./calc.at:1433: cat stderr +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) Stack now 0 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: +1.1: syntax error, unexpected end of input +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] 
\(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +input: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: "$PERL" -pi -e 'use strict; +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -135613,9 +139670,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: ./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +1.1: syntax error, unexpected end of file +syntax error: invalid character: '#' +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1437: cat stderr +./calc.at:1455: $PREPARSER ./calc input +stderr: +stderr: +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -135926,57 +140013,439 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) +Next token is token end of inp1.7: syntax error +ut (2.1: ) Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1459: cat stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +stderr: +./calc.at:1448: $PREPARSER ./calc input +stderr: +./calc.at:1434: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error, unexpected end of file +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Stack now 0 8 18 27 +Next token is token invalid token (1.1: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: cat stderr +./calc.at:1457: cat stderr +./calc.at:1477: cat stderr + | + | +1 +./calc.at:1441: $PREPARSER ./calc input +stderr: +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +./calc.at:1445: cat stderr +./calc.at:1435: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Stack now 0 8 18 -Error: popping token '=' () -Stack now 0 8 -Error: popping nterm exp (1) +Next token is token invalid token (1.1: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -Cleanup: discarding lookahead token '=' () +1.6: syntax error: invalid character: '#' +input: +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 Stack now 0 -./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1437: cat stderr + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (1 + #) = 1111 +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | + | +1 +./calc.at:1477: $PREPARSER ./calc input stderr: -stdout: +./calc.at:1453: cat stderr +stderr: +./calc.at:1438: cat stderr +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1426: cat stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +input: +input: +./calc.at:1476: cat stderr +stderr: +stderr: +stderr: +input: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + | (!!) + (1 2) = 1 + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +1.6: syntax error: invalid character: '#' +' expout || exit 77 +2.1: syntax error + | + | +1 +./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1458: cat stderr +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: $PREPARSER ./calc input +stderr: +./calc.at:1449: $PREPARSER ./calc input +stderr: +./calc.at:1468: cat stderr +./calc.at:1440: cat stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 stderr: +stderr: +./calc.at:1478: cat stderr +input: +./calc.at:1431: cat stderr +stderr: +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1468: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +2.1: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Stack now 0 +Cleanup: discarding lookahead token '+' () +Stack now 0 +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1453: $PREPARSER ./calc input +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +2.1: syntax error +input: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: cat stderr + | (1 + 1) / (1 - 1) + | 1 2 +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + | + | +1 +./calc.at:1476: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (!!) + (1 2) = 1 +./calc.at:1440: $PREPARSER ./calc input +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1455: cat stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: +stderr: +stderr: +stderr: +error: null divisor +1.3: syntax error Starting parse Entering state 0 Stack now 0 @@ -136287,38 +140756,22 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of inp./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error -1.10: syntax error -1.16: syntax error -ut (2.1: ) +Next token is token end of input (2.1: ) Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $PREPARSER ./test -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1454: cat stderr +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -136329,26 +140782,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1454: cat stderr -input: -input: -input: -input: - | 1 2 - | 1 + 2 * 3 + !- ++ -stderr: | | +1 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1435: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1426: cat stderr -./calc.at:1438: cat stderr -./calc.at:1441: cat stderr -./calc.at:1433: cat stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1478: $PREPARSER ./calc input +stderr: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +2.1: syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136358,13 +140804,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -stderr: -1.3: syntax error, unexpected number -stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; +input: +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136373,98 +140816,46 @@ ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ' expout || exit 77 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +1.3: syntax error +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Stack now 0 8 18 -Error: popping token '=' () -Stack now 0 8 -Error: popping nterm exp (1) +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () Stack now 0 -Cleanup: discarding lookahead token '=' () +Cleanup: discarding lookahead token '+' () Stack now 0 -input: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1438: $PREPARSER ./calc /dev/null -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | error -./calc.at:1454: $PREPARSER ./calc input -1.3: syntax error, unexpected number - | (!!) + (1 2) = 1 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1440: cat stderr -input: -stderr: stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -======== Testing with C++ standard flags: '' - | (#) + (#) = 2222 -./calc.at:1433: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -1.1: syntax error, unexpected invalid token - | 1 = 2 = 3 stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1432: cat stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1454: $PREPARSER ./calc input stderr: +2.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: cat stderr stderr: Starting parse Entering state 0 @@ -136475,121 +140866,298 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token 
'+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.14: ) +Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp 
(1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -136599,46 +141167,33 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) +Next token is token end of inp1.11-17: error: null divisor +ut (2.1: ) Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 Cleanup: popping token end of input 
(2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1459: cat stderr stderr: -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -2.1: syntax error, unexpected '+' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: cat stderr stderr: +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error + | (1 + # + 1) = 1111 +./calc.at:1431: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 @@ -136779,27 +141334,23 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.1: syntax error, unexpected invalid token -input: -./calc.at:1453: cat stderr -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 - | (!!) + (1 2) = 1 input: -./calc.at:1445: cat stderr -./calc.at:1440: $PREPARSER ./calc input -syntax error - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1435: "$PERL" -pi -e 'use strict; + | 1 = 2 = 3 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1455: $PREPARSER ./calc input +error: null divisor +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136809,12 +141360,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1434: cat stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1441: $PREPARSER ./calc /dev/null +./calc.at:1477: cat stderr +stderr: +2.1: syntax error +./calc.at:1486: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136824,7 +141375,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1454: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136834,9 +141387,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: +1.11-17: error: null divisor +syntax error: invalid character: '#' input: stderr: -./calc.at:1433: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136845,8 +141403,27 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +syntax error ' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr + | (!!) 
+ (1 2) = 1 +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136856,17 +141433,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: +./calc.at:1459: $PREPARSER ./calc input stderr: -./calc.at:1435: cat stderr - | 1//2 -syntax error -./calc.at:1453: $PREPARSER ./calc input - | - | +1 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: $PREPARSER ./calc input +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1477: $PREPARSER ./calc /dev/null +input: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -137007,9 +141597,104 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +LAC: checking lookahead '=': Err +LAC: checking lookahead end of file: R6 G8 Err +LAC: checking lookahead number: R6 G8 Err +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +LAC: checking lookahead NEG: R6 G8 Err +LAC: checking lookahead '^': S23 +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: +./calc.at:1434: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1457: $PREPARSER ./calc input + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +stderr: +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1438: cat stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1453: cat stderr +./calc.at:1435: cat stderr stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -137019,45 +141704,222 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1454: cat stderr -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr +stderr: +1.1: syntax error +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 + | (1 + 1) / (1 - 1) +./calc.at:1433: $PREPARSER ./calc input ./calc.at:1437: cat stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1445: cat stderr stderr: -./calc.at:1435: $PREPARSER ./calc /dev/null -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | (# + 1) = 1111 +stderr: +1.1: syntax error +./calc.at:1449: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1434: $PREPARSER ./calc input +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1448: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +LAC: checking lookahead '=': Err +LAC: checking lookahead end of file: R6 G8 Err +LAC: checking lookahead number: R6 G8 Err +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +LAC: checking lookahead NEG: R6 G8 Err +LAC: checking lookahead '^': S23 +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '+' () +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 +input: +1.11-17: error: null divisor +input: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (!!) + (1 2) = 1 + | (!!) 
+ (1 2) = 1 stderr: +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' +input: +./calc.at:1432: cat stderr +./calc.at:1478: cat stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1458: cat stderr +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc /dev/null stderr: -./calc.at:1434: $PREPARSER ./calc input stderr: -1.1: syntax error, unexpected end of input +./calc.at:1440: cat stderr +./calc.at:1453: $PREPARSER ./calc input +1.1: syntax error +1.11-17: error: null divisor + | (!!) + (1 2) = 1 +stderr: +./calc.at:1426: cat stderr +./calc.at:1476: cat stderr +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1441: cat stderr +input: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + | 1//2 +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1459: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input () +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +Stack now 0 +stderr: +./calc.at:1478: $PREPARSER ./calc /dev/null +1.2: syntax error: invalid character: '#' +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $PREPARSER ./calc /dev/null +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +input: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +input: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -137198,48 +142060,7 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: cat stderr -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1454: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Stack now 0 -Cleanup: discarding lookahead token '+' () -Stack now 0 -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -137249,53 +142070,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: cat stderr + | (!!) + (1 2) = 1 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 input: -./calc.at:1438: cat stderr - | 1 + 2 * 3 + !+ ++ -./types.at:139: $PREPARSER ./test -./calc.at:1437: $PREPARSER ./calc input -stderr: -stderr: -stderr: -1.7: syntax error, unexpected '=' -1.1: syntax error, unexpected end of input +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: $PREPARSER ./calc input input: -./calc.at:1440: "$PERL" -pi -e 'use strict; +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +1.1: syntax error my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1457: cat stderr stderr: -input: -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' | (- *) + (1 2) = 1 - | 1 + 2 * 3 + !- ++ -input: - | (1 + #) = 1111 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: "$PERL" -pi -e 'use strict; +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1468: cat stderr +1.3: syntax error +stderr: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +522. calc.at:1426: ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -137305,213 +142109,63 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1441: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 + ok +524. calc.at:1432: stderr: stderr: -./calc.at:1440: cat stderr stderr: +stderr: +stderr: + ok +1.1: syntax error +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1454: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input () +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +Stack now 0 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: + | (- *) + (1 2) = 1 +./calc.at:1477: cat stderr ./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error, unexpected '=' -1.6: syntax error: invalid character: '#' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: $PREPARSER ./calc input stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1455: cat stderr ' expout || exit 77 -======== Testing with C++ standard flags: '' -./calc.at:1426: cat stderr -./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1433: cat stderr +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1453: cat stderr input: stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1435: cat stderr -stdout: -1.6: syntax error: invalid character: '#' - | (- *) + (1 2) = 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: $PREPARSER ./calc 
input stderr: -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - + | (!!) + (1 2) = 1 +1.1: syntax error Starting parse Entering state 0 Stack now 0 @@ -137822,6 +142476,149 @@ Entering state 6 Stack now 0 6 Reading a token +Next token is token end of inp./calc.at:1457: $PREPARSER ./calc input +ut (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.3: syntax error +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 
(line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token Next token is token end of input (2.1: ) Shifting token end of input (2.1: ) Entering state 16 @@ -137829,8 +142626,11 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1454: "$PERL" -pi -e 'use strict; + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -137840,31 +142640,197 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: cat stderr +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error +./calc.at:1440: sed 
>&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -stdout: input: -./calc.at:1457: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1453: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1445: cat stderr -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1454: cat stderr +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 Starting parse Entering state 0 Stack now 0 @@ -138182,6 +143148,45 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +./calc.at:1477: $PREPARSER ./calc input +stderr: +input: +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1437: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +stderr: + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -138330,8 +143335,9 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; +525. 
calc.at:1433: stderr: + +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -138341,14 +143347,1371 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 + ok +stdout: input: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1431: $PREPARSER ./calc input stderr: stderr: -./calc.at:1434: cat stderr -1.1: syntax error, unexpected invalid token +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1446: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +LAC: checking lookahead end of file: S16 +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: +stderr: +stdout: +stderr: +stdout: +./calc.at:1451: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./types.at:139: $PREPARSER ./test +561. calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose ... 
+./calc.at:1486: mv calc.y.tmp calc.y + +./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr +input: +./calc.at:1458: cat stderr +input: +./calc.at:1435: cat stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +input: + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 + | (1 + # + 1) = 1111 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1434: $PREPARSER ./calc input +stderr: +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1445: cat stderr +input: +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1446: $PREPARSER ./calc input +error: null divisor +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +LAC: checking lookahead end of file: S16 +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1448: cat stderr +stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1438: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +stderr: +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +input: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: $PREPARSER ./calc input +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1445: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +563. calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: mv calc.y.tmp calc.y + +input: +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: input: stderr: +./calc.at:1441: cat stderr +./calc.at:1440: cat stderr +449. types.at:139: 562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose ... + | error + | (- *) + (1 2) = 1 +./calc.at:1448: $PREPARSER ./calc input + | (- *) + (1 2) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 input: + ok +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1435: $PREPARSER ./calc input +error: null divisor +stderr: +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: mv calc.y.tmp calc.y + + | (- *) + (1 2) = 1 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./calc.at:1454: cat stderr +stderr: +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 
29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token 
is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) 
+Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 
(line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering 
state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 19 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 19 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number 
(12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 
+Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1457: cat stderr +stderr: +1.1: syntax error +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1476: cat stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -138497,57 +144860,64 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -input: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1445: $PREPARSER ./calc /dev/null -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1457: $PREPARSER ./calc input +input: + | (!!) + (1 2) = 1 +./calc.at:1441: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 ' expout || exit 77 +./calc.at:1455: cat stderr +stderr: + | (- *) + (1 2) = 1 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 + | (* *) + (*) + (*) +1.1: syntax error +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: $PREPARSER ./calc input +input: +stderr: +stderr: +input: +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -138696,309 +145066,97 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error, unexpected invalid token -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -./calc.at:1426: $PREPARSER ./calc input -' expout || exit 77 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | - | +1 -./calc.at:1454: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input () -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Stack now 0 -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: - | (1 + #) = 1111 -./calc.at:1434: $PREPARSER ./calc input -stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: cat stderr -stderr: -stderr: -syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: cat stderr -2.1: syntax error, unexpected '+' -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stderr: -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: cat stderr -syntax error -1.6: syntax error: invalid character: '#' -./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1453: cat stderr -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -2.1: syntax error, unexpected '+' -input: -input: -./calc.at:1441: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1440: cat stderr - | (# + 1) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input () -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Stack now 0 -./calc.at:1433: $PREPARSER ./calc input -input: -./calc.at:1438: $PREPARSER ./calc input - | 1 = 2 = 3 -input: -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (#) + (#) = 2222 -input: -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 2 - | 1 2 -stderr: -input: -stderr: -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1432: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: $PREPARSER ./calc input -stderr: -./calc.at:1437: cat stderr -1.7: syntax error, unexpected '=' - | (* *) + (*) + (*) -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | (- *) + (1 2) = 1 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 
27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -139008,1374 +145166,943 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: cat stderr -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) - | (* *) + (*) + (*) -./calc.at:1440: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -stderr: -stderr: -./calc.at:1454: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 8 20 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token '=' (1.14: ) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Stack now 0 6 8 +Next token is token '=' (2.12: ) 
+Shifting token '=' (2.12: ) Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.7: syntax error, unexpected '=' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./types.at:139: ./check -./calc.at:1454: $PREPARSER ./calc /dev/null -stderr: -stderr: -input: -stderr: -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 -Stack now 0 4 +Stack now 0 6 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 8 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ 
= nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error, unexpected end of file - | (!!) + (1 2) = 1 - | (#) + (#) = 2222 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: cat stderr -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stdout: -stdout: -./calc.at:1448: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1458: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -stderr: -./calc.at:1445: cat stderr -./calc.at:1426: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1438: cat stderr -1.1: syntax error, unexpected end of file -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr -./calc.at:1459: cat stderr -./calc.at:1453: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 102): +./calc.at:1457: $PREPARSER ./calc input + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -input: -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1448: $PREPARSER ./calc input -stderr: -stderr: -syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -input: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: - | (1 + #) = 1111 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1434: cat stderr - | - | +1 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1433: cat stderr -stderr: -input: -stdout: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./types.at:139: ./check -stderr: -./calc.at:1438: $PREPARSER ./calc input -input: -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -input: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -2.1: syntax error, unexpected '+' - | 1//2 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -stderr: -./calc.at:1441: cat stderr -./calc.at:1454: cat stderr - | 1//2 - | (1 + # + 1) = 1111 -./calc.at:1433: $PREPARSER ./calc input -stderr: -./calc.at:1440: cat stderr -./calc.at:1459: $PREPARSER ./calc input -stderr: -input: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 8 20 4 12 +Stack now 0 6 8 19 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 
20 29 +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.6: syntax error: invalid character: '#' -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' 
stderr -./calc.at:1435: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1434: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -stderr: -2.1: syntax error, unexpected '+' -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1437: cat stderr -./calc.at:1445: $PREPARSER ./calc input -input: -stderr: -stderr: -stderr: -1.6: syntax error: invalid character: '#' -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -1.2: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 6 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (13.2: 2) +-> $$ 
= nterm exp (13.2: 2) Entering state 12 -Stack now 0 8 20 4 12 +Stack now 0 6 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp 
(13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1454: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ - | (- *) + (1 2) = 1 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) + +stderr: +./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 @@ -140693,30 +146420,153 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -stderr: -input: +./calc.at:1455: $PREPARSER ./calc /dev/null stderr: -input: -./calc.at:1441: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' - | (1 + #) = 1111 - | 1 2 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1458: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -140725,8 +146575,9 @@ ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +input: ' expout || exit 77 -1.2: syntax error: invalid character: '#' +stderr: ./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -140737,214 +146588,184 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1437: $PREPARSER ./calc input - | 1 2 -stderr: -stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +./calc.at:1476: $PREPARSER ./calc input 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1438: cat stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -stderr: -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: + | 1 2 +./calc.at:1477: cat stderr +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1468: cat stderr +stderr: +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -140953,21 +146774,43 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg ' expout || exit 77 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: cat stderr -./calc.at:1459: cat stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +' expout || exit 77 +./calc.at:1459: $PREPARSER ./calc input input: +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) stderr: -input: -1.3: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 @@ -141285,209 +147128,219 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg stderr: +' expout || exit 77 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 +stderr: + | 1 + 2 * 3 + !+ ++ Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token end of file (1.1: ) +LAC: initial context established for end of file +LAC: checking lookahead end of file: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1453: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | error -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1438: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1426: cat stderr -stderr: -1.3: syntax error -stderr: -./calc.at:1433: cat stderr -stderr: -./calc.at:1453: $PREPARSER ./calc /dev/null -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting 
token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 20 29 21 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1437: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1458: cat stderr -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1432: cat stderr -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: cat stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -./calc.at:1435: "$PERL" -pi -e 'use strict; +1.3: syntax error +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1448: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 Starting parse Entering state 0 Stack now 0 @@ -141639,10 +147492,50 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of file (1.1: ) +LAC: initial context established for end of file +LAC: checking lookahead end of file: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +input: + | 1 2 +./calc.at:1446: $PREPARSER ./calc input input: + | (!!) 
+ (1 2) = 1 stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1448: "$PERL" -pi -e 'use strict; +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1438: cat stderr +1.3: syntax error +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1431: cat stderr +./calc.at:1434: cat stderr +stderr: +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141652,16 +147545,72 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error, unexpected end of input -input: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) stderr: -./calc.at:1440: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | error +stderr: +input: +input: +./calc.at:1458: $PREPARSER ./calc input input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: cat stderr + | 1 = 2 = 3 + | (* *) + (*) + (*) +./calc.at:1449: cat stderr +./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | (!!) + (1 2) = 1 +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1457: cat stderr +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1441: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: ./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -141672,22 +147621,123 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1454: "$PERL" -pi -e 'use strict; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; +input: +523. calc.at:1431: my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.7: syntax error + ok +./calc.at:1478: cat stderr +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 + | (* *) + (*) + (*) +./calc.at:1435: $PREPARSER ./calc input +stderr: +./calc.at:1445: cat stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | (* *) + (*) + (*) +./calc.at:1459: cat stderr + | (* *) + (*) + (*) +input: +input: +./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1454: cat stderr +./calc.at:1455: cat stderr +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1457: $PREPARSER ./calc input +syntax error +error: 2222 != 1 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +stderr: +input: +./calc.at:1449: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +stderr: +stderr: +input: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1434: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1440: cat stderr +stderr: +stderr: +input: +./calc.at:1445: $PREPARSER ./calc input +1.7: syntax error + | 1 + 2 * 3 + !- ++ + | (- *) + (1 2) = 1 +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1437: $PREPARSER ./calc input +564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141697,8 +147747,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + # + 1) = 1111 +stderr: input: +stderr: +./calc.at:1489: mv calc.y.tmp calc.y + Starting parse Entering state 0 Stack now 0 @@ -141850,547 +147903,29 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1435: cat stderr -input: -stderr: -stderr: -stderr: -stderr: -./calc.at:1426: $PREPARSER ./calc input -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1434: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - | 1 + 2 * 3 + !- ++ -./calc.at:1441: $PREPARSER ./calc input -stderr: -./calc.at:1454: cat stderr -1.1: syntax error, unexpected end of input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = 
nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: - | 1//2 -stderr: -./calc.at:1458: $PREPARSER ./calc input stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) stderr: -1.6: syntax error: invalid character: '#' -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 +1.11: syntax error +1.1-16: error: 2222 != 1 stderr: -./calc.at:1445: cat stderr -./calc.at:1457: cat stderr -1.11-17: error: null divisor - | (# + 1) = 1111 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1437: cat stderr -./calc.at:1448: cat stderr -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: input: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -stderr: -./calc.at:1432: $PREPARSER ./calc input -stderr: - | (* *) + (*) + (*) -./calc.at:1435: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1454: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error syntax error -syntax error -error: 4444 != 1 -1.11-17: error: null divisor -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1453: cat stderr -input: -stderr: -input: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.2: syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +error: 2222 != 1 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | 1//2 -input: -./calc.at:1476: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1438: cat stderr -1.6: syntax error: invalid character: '#' | (!!) 
+ (1 2) = 1 -./calc.at:1448: $PREPARSER ./calc input -stderr: - | 1 = 2 = 3 -./calc.at:1437: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1459: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error -./calc.at:1453: $PREPARSER ./calc input +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -stderr: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -input: -stderr: ./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: cat stderr -stderr: -1.3: syntax error -stderr: -stderr: input: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 - | 1 + 2 * 3 + !+ ++ +stderr: Starting parse Entering state 0 Stack now 0 @@ -142531,294 +148066,170 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1458: cat stderr -./calc.at:1441: cat stderr -437. types.at:139: ./calc.at:1438: $PREPARSER ./calc input - | 1 2 -./calc.at:1433: cat stderr -stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - ok -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -input: + | (* *) + (*) + (*) +./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1454: $PREPARSER ./calc input stderr: -input: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) + | 1 + 2 * 3 + !+ ++ +1.11-17: error: null divisor stderr: - | (#) + (#) = 2222 - | 1 = 2 = 3 ./calc.at:1459: $PREPARSER ./calc input -./calc.at:1441: $PREPARSER ./calc input - | (#) + (#) = 2222 -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stdout: -525. calc.at:1433: ok -stderr: -syntax error -./calc.at:1426: cat stderr + stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Stack now 0 4 5 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): 
- $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1440: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1468: "$PERL" -ne ' +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stdout: +./calc.at:1476: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -142829,8 +148240,51 @@ || /\t/ )' calc.cc +565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1491: mv calc.y.tmp calc.y + +./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +stderr: +stdout: +stdout: +./calc.at:1479: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1443: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1451: cat stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (!!) + (1 2) = 1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1440: $PREPARSER ./calc input stderr: -./calc.at:1435: cat stderr ./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -142841,23 +148295,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1455: $PREPARSER ./calc input +stderr: +./calc.at:1476: $PREPARSER ./calc input +stderr: +input: +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1434: cat stderr -stderr: -stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142867,7 +148315,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1476: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -142877,524 +148345,418 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Next token is token ')' (1.2: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a 
token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr -stderr: -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.13: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift Entering state 20 -Stack now 0 8 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '+' (1.17: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.18: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '+' (1.20: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -./calc.at:1437: cat stderr -./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: - | error -./calc.at:1458: $PREPARSER ./calc input -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1454: cat stderr -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1434: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 +Stack now 0 8 20 4 11 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 +Next token is token '*' (1.25: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +LAC: initial context discarded due to error recovery +Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Reading a token +Next token is token '*' (1.27: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +LAC: initial context discarded due to error recovery +Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '+' (1.30: 
) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -input: -./calc.at:1448: cat stderr -stderr: - -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 20 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '*' (1.39: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': R9 G12 S21 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +LAC: initial context discarded due to shift +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '*' (1.41: ) +LAC: initial context established for '*' 
+LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +LAC: initial context discarded due to error recovery +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (1.44: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack by rule 5 (line 92): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (1.47-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 
-Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + # + 1) = 1111 -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) - -./calc.at:1453: cat stderr -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | (!!) + (1 2) = 1 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: $PREPARSER ./calc input input: -./calc.at:1426: $PREPARSER ./calc input stderr: -input: stderr: - | error - | - | +1 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor Starting parse Entering state 0 Stack now 0 @@ -143404,104 +148766,129 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.17: 1) +Shifting token number 
(1.17: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -143518,231 +148905,23 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -input: -./calc.at:1457: $PREPARSER ./calc input -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -input: -stderr: -input: -stderr: -stderr: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1448: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1459: cat stderr -1.11-17: error: null divisor - | (1 + # + 1) = 1111 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1438: $PREPARSER ./calc input -syntax error -error: 2222 != 1 - | (!!) + (1 2) = 1 -stderr: -./calc.at:1453: $PREPARSER ./calc input -stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: cat stderr -stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -1.11-17: error: null divisor -./calc.at:1458: cat stderr -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 | - | +1 -stderr: -stderr: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1459: $PREPARSER ./calc input -1.1: syntax error -input: -syntax error -error: 2222 != 1 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 2 -stderr: + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 stdout: -input: -./calc.at:1468: $PREPARSER ./calc input -stderr: -./calc.at:1441: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | 1 = 2 = 3 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -stderr: -./calc.at:1458: $PREPARSER ./calc input -stderr: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1440: cat stderr -input: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -input: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1443: $PREPARSER ./calc input ./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1455: "$PERL" -ne ' +./calc.at:1469: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -143753,38 +148932,12 @@ || /\t/ )' calc.cc -./calc.at:1457: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -input: -./calc.at:1435: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - | (1 + #) = 1111 -stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: cat stderr -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1441: $PREPARSER ./calc input -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -syntax error -stderr: -./calc.at:1457: $PREPARSER ./calc /dev/null -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; ./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -143795,9 +148948,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: cat stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143807,207 +148975,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1446: cat stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: stderr: -input: -input: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | 1//2 +stderr: +stderr: +./calc.at:1477: cat stderr +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1437: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -144017,26 +148996,18 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 4 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '!' () +Shifting token '!' 
() +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Stack now 0 4 Shifting token error () Entering state 11 @@ -144156,56 +149127,6 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -547. calc.at:1476: testing Calculator glr2.cc ... -./calc.at:1459: cat stderr - | (1 + #) = 1111 -stderr: -./calc.at:1476: mv calc.y.tmp calc.y - -./calc.at:1440: $PREPARSER ./calc input -input: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1455: $PREPARSER ./calc input - | 1//2 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1434: cat stderr -./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1437: cat stderr -./calc.at:1453: cat stderr -./calc.at:1459: $PREPARSER ./calc /dev/null -stderr: -stderr: -stderr: -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -144215,472 +149136,671 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token ')' (1.2: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: 
checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': R9 G12 S21 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) 
+Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +LAC: initial context discarded due to shift +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 92): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.47-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 
(line 93): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -548. calc.at:1477: testing Calculator C++ %glr-parser %locations ... -./calc.at:1454: cat stderr -./calc.at:1426: cat stderr stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Stack now 0 8 20 4 -Shifting token error () +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -syntax error +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 
+Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: cat stderr -input: +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +syntax error +error: 2222 != 1 +stderr: ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1469: $PREPARSER ./calc input input: -./calc.at:1448: cat stderr -./calc.at:1477: mv calc.y.tmp calc.y - input: -526. 
calc.at:1434: ok - | (- *) + (1 2) = 1 -./calc.at:1458: cat stderr ./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -144691,19 +149811,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | (1 + 1) / (1 - 1) - | (1 + 1) / (1 - 1) -./calc.at:1432: $PREPARSER ./calc input +./calc.at:1453: cat stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1458: $PREPARSER ./calc input + | 1//2 +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1453: $PREPARSER ./calc input -syntax error -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -./calc.at:1435: cat stderr -./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +stderr: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +1.3: syntax error +stderr: Starting parse Entering state 0 Stack now 0 @@ -144712,7 +149846,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -144727,7 +149861,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -144742,16 +149876,14 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R9 G29 R7 G8 S18 -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -144759,7 +149891,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -144768,7 +149900,6 @@ Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -144776,16 +149907,14 @@ Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 
-Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) @@ -144794,16 +149923,15 @@ Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 @@ -144813,7 +149941,7 @@ Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 @@ -144828,7 +149956,7 @@ Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 @@ -144848,23 +149976,21 @@ Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 30 Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -144872,7 +149998,7 @@ Entering state 29 Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -144881,7 +150007,6 @@ Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -144894,23 +150019,21 @@ Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) @@ -144919,16 +150042,15 @@ Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (2.1-15: -5) $2 = token '\n' 
(2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) @@ -144939,12 +150061,12 @@ Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) @@ -144960,7 +150082,7 @@ Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 @@ -144975,16 +150097,14 @@ Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G10 R11 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -144992,7 +150112,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -145000,7 +150120,6 @@ Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -145013,24 +150132,21 @@ Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): - $1 = token stderr: -number (4.9: 1) +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) @@ -145039,16 +150155,15 @@ Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) @@ -145069,16 +150184,14 @@ Shifting token number (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 
Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R11 G12 S26 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -145086,10 +150199,9 @@ Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -145106,16 +150218,14 @@ Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -145124,7 +150234,6 @@ Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -145132,16 +150241,14 @@ Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) @@ -145150,16 +150257,15 @@ Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) @@ -145170,12 +150276,12 @@ Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) @@ -145201,30 +150307,28 @@ Shifting token number (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is 
token '=' (7.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -145232,7 +150336,6 @@ Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -145245,23 +150348,21 @@ Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) @@ -145270,16 +150371,15 @@ Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) @@ -145290,12 +150390,12 @@ Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) @@ -145306,7 +150406,7 @@ Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 @@ -145321,16 +150421,14 @@ Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -LAC: initial context established for '-' -LAC: checking lookahead '-': R8 G8 S19 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -145339,7 +150437,6 @@ Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) -LAC: initial context discarded due to shift Entering state 19 Stack now 0 6 8 19 Reading a token @@ -145347,16 +150444,14 @@ Shifting token number (9.9: 3) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.9: 3) -> 
$$ = nterm exp (9.9: 3) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -145365,7 +150460,6 @@ Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -145378,23 +150472,21 @@ Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) @@ -145403,16 +150495,15 @@ Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) @@ -145423,7 +150514,7 @@ Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 @@ -145443,7 +150534,7 @@ Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 @@ -145458,16 +150549,14 @@ Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -145476,10 +150565,9 @@ Stack now 0 6 8 19 4 12 Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -145488,9 +150576,7 @@ Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): 
+Reducing stack by rule 8 (line 91): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -145499,7 +150585,6 @@ Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -145507,16 +150592,14 @@ Shifting token number (10.15: 2) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) @@ -145525,16 +150608,15 @@ Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) @@ -145545,12 +150627,12 @@ Shifting token '\n' (11.1-12.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) @@ -145561,7 +150643,7 @@ Shifting token number (12.1: 2) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 @@ -145576,7 +150658,7 @@ Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 @@ -145591,16 +150673,14 @@ Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G32 R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -145608,7 +150688,7 @@ Entering state 32 Stack now 0 6 8 23 32 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -145617,26 +150697,21 @@ Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token Next token is token number (12.9-11: 256) -Shifting token number (input: -./calc.at:1457: cat stderr -12.9-11: 256) +Shifting token number (12.9-11: 256) 
Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) @@ -145645,16 +150720,15 @@ Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) @@ -145670,7 +150744,7 @@ Shifting token number (13.2: 2) Entering state 1 Stack now 0 6 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 @@ -145685,16 +150759,14 @@ Shifting token number (13.4: 2) Entering state 1 Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' (13.5: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R12 G12 S26 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -145703,10 +150775,9 @@ Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -145723,16 +150794,14 @@ Shifting token number (13.7: 3) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -145741,7 +150810,6 @@ Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -145749,16 +150817,14 @@ Shifting token number (13.11-12: 64) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp 
(13.11-12: 64) @@ -145767,68 +150833,39 @@ Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-13.0: ) $2 = nterm line (13.1-14.0: ) -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (14.1: ) -Shifting token end of file (14.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (14.1: ) +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: input: -stderr: -stderr: - | (* *) + (*) + (*) -./calc.at:1454: $PREPARSER ./calc input -1.11-17: error: null divisor -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 input: - | (- *) + (1 2) = 1 input: input: -./calc.at:1448: $PREPARSER ./calc input -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) input: - | (#) + (#) = 2222 -./calc.at:1468: cat stderr -1.11-17: error: null divisor -stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | (#) + (#) = 2222 +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -145839,26 +150876,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1457: $PREPARSER ./calc input - | - | +1 -stderr: -./calc.at:1435: $PREPARSER ./calc input -stderr: -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1479: $PREPARSER ./calc input + | 1 2 + | (- *) + (1 2) = 1 +./calc.at:1477: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1468: cat stderr +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1435: cat stderr +./calc.at:1453: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1448: cat stderr +stderr: +./calc.at:1457: cat stderr +syntax error +error: 2222 != 1 +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -145867,7 +150917,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -145882,7 +150932,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -145897,16 +150947,95 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R9 G29 R7 G8 S18 -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -145914,7 +151043,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -145923,7 +151052,6 @@ Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -145931,16 +151059,14 @@ Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) @@ -145949,16 +151075,15 @@ Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 @@ -145968,7 +151093,7 @@ Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 @@ -145983,7 +151108,7 @@ Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 @@ -146003,23 +151128,21 @@ Shifting token number (2.10: 3) Entering state 
1 Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 30 Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -146027,7 +151150,7 @@ Entering state 29 Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -146036,7 +151159,6 @@ Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -146049,23 +151171,21 @@ Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) @@ -146074,16 +151194,15 @@ Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) @@ -146094,12 +151213,12 @@ Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) @@ -146115,7 +151234,7 @@ Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 @@ -146130,16 +151249,14 @@ Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -LAC: initial 
context established for '=' -LAC: checking lookahead '=': R12 G10 R11 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -146147,7 +151264,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -146155,7 +151272,6 @@ Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -146168,23 +151284,21 @@ Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) @@ -146193,16 +151307,16 @@ Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): +stderr: $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) @@ -146223,16 +151337,14 @@ Shifting token number (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R11 G12 S26 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -146240,10 +151352,9 @@ Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -146260,16 +151371,14 @@ Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -146278,7 +151387,6 @@ Stack now 0 6 8 Next token is token '=' (5.8: ) 
Shifting token '=' (5.8: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -146286,16 +151394,14 @@ Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) @@ -146304,16 +151410,15 @@ Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) @@ -146324,12 +151429,12 @@ Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) @@ -146355,30 +151460,28 @@ Shifting token number (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -146386,7 +151489,6 @@ Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -146399,23 +151501,21 @@ Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 
(line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) @@ -146424,16 +151524,15 @@ Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) @@ -146444,12 +151543,12 @@ Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) @@ -146460,7 +151559,7 @@ Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 @@ -146475,16 +151574,14 @@ Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -LAC: initial context established for '-' -LAC: checking lookahead '-': R8 G8 S19 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -146493,7 +151590,6 @@ Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) -LAC: initial context discarded due to shift Entering state 19 Stack now 0 6 8 19 Reading a token @@ -146501,16 +151597,14 @@ Shifting token number (9.9: 3) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -146519,7 +151613,6 @@ Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -146532,23 +151625,21 @@ Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 
80): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) @@ -146557,16 +151648,15 @@ Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) @@ -146577,7 +151667,7 @@ Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 @@ -146597,7 +151687,7 @@ Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 @@ -146612,16 +151702,14 @@ Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -146630,10 +151718,9 @@ Stack now 0 6 8 19 4 12 Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -146642,9 +151729,7 @@ Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -146653,7 +151738,6 @@ Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -146661,16 +151745,14 @@ Shifting token number (10.15: 2) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) @@ -146679,16 +151761,15 @@ Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) Entering state 17 Stack 
now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) @@ -146699,12 +151780,12 @@ Shifting token '\n' (11.1-12.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) @@ -146715,7 +151796,7 @@ Shifting token number (12.1: 2) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 @@ -146730,7 +151811,7 @@ Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 @@ -146745,16 +151826,14 @@ Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G32 R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -146762,7 +151841,7 @@ Entering state 32 Stack now 0 6 8 23 32 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -146771,7 +151850,6 @@ Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -146779,16 +151857,14 @@ Shifting token number (12.9-11: 256) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) @@ -146797,16 +151873,15 @@ Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) @@ -146822,7 +151897,7 @@ Shifting token number (13.2: 2) Entering state 1 Stack now 0 6 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 @@ -146837,16 +151912,14 @@ Shifting token 
number (13.4: 2) Entering state 1 Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' (13.5: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R12 G12 S26 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -146855,10 +151928,9 @@ Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -146872,19 +151944,47 @@ Stack now 0 6 8 23 Reading a token Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Shifting token num1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +ber (13.7: 3) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -146893,7 +151993,6 @@ Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -146901,16 +152000,14 @@ Shifting token number (13.11-12: 64) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp (13.11-12: 64) @@ -146919,59 +152016,102 @@ Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input 
(1.1-13.0: ) $2 = nterm line (13.1-14.0: ) -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (14.1: ) -Shifting token end of file (14.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (14.1: ) +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1438: $PREPARSER ./calc input -1.11-17: error: null divisor -./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error, unexpected number +1.3: syntax error +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (- *) + (1 2) = 1 + | 1 2 + | 1 + 2 * 3 + !+ ++ +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1441: cat stderr +./calc.at:1435: $PREPARSER ./calc input + | + | +1 stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr +./calc.at:1448: $PREPARSER ./calc input stderr: stderr: -./calc.at:1440: cat stderr - stderr: -input: stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -syntax error -error: 2222 != 1 -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 stderr: -./calc.at:1459: cat stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1468: $PREPARSER ./calc input -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number +./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.4: syntax error +1.12: syntax error 1.1-17: error: 2222 != 1 +syntax error, unexpected number +syntax error +input: +input: +input: ./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -146982,44 +152122,226 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: cat stderr +./calc.at:1491: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: cat stderr +./calc.at:1454: cat stderr + | (* *) + (*) + (*) +input: + | 1 2 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1443: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ stderr: -1.11-17: error: null divisor -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1441: cat stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -input: stderr: +./calc.at:1457: $PREPARSER ./calc input stderr: stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1440: $PREPARSER ./calc input stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +2.1: syntax error +stderr: +syntax error +syntax error +error: 2222 != 1 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1459: cat stderr +526. calc.at:1434: ok +syntax error +syntax error +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.7: syntax error -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -syntax error ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (# + 1) = 1111 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1455: cat stderr +input: + | (- *) + (1 2) = 1 input: + | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 + !+ ++ +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: cat stderr + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1458: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +2.1: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp 
(1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -147029,104 +152351,132 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -147143,26 +152493,24 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1432: "$PERL" -pi -e 'use strict; +input: +./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1478: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -syntax error -error: 2222 != 1 -./calc.at:1453: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !- ++ +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147172,32 +152520,105 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr - | 1 2 -./calc.at:1455: $PREPARSER ./calc input +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 stderr: -./calc.at:1445: cat stderr +./calc.at:1455: $PREPARSER ./calc input stderr: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1476: cat stderr stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 syntax error - | (# + 1) = 1111 -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error +error: 2222 != 1 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 @@ -147207,104 +152628,132 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token 
-Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -147321,118 +152770,91 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1441: $PREPARSER ./calc input - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: +stdout: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line 
(1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1457: cat stderr -stderr: -./calc.at:1453: cat stderr -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +input: +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147442,20 +152864,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1432: cat stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (* *) + (*) + (*) -./calc.at:1445: $PREPARSER ./calc input -528. calc.at:1437: ok +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +./calc.at:1451: cat stderr input: -./calc.at:1458: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147465,7 +152880,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +stderr: +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -147474,30 +152902,34 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token Next token is token number (1.3: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 stderr: -input: -./calc.at:1448: cat stderr + | (- *) + (1 2) = 1 + | (#) + (#) = 2222 + | 1 + 2 * 3 + !- ++ +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input +stderr: +./calc.at:1477: cat stderr +./calc.at:1459: $PREPARSER ./calc input +stderr: +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1435: $PREPARSER ./calc input +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -147507,198 +152939,269 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 120): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp 
(1.1-14: 1111) +Next token is token '\n' (1.17-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | error +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering 
state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (* *) + (*) + (*) -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1458: $PREPARSER ./calc /dev/null -./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: "$PERL" -pi -e 'use strict; +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +stderr: +stderr: +stderr: +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147708,14 +153211,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: cat stderr stderr: -./calc.at:1476: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1435: cat stderr stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -147725,19 +153229,23 @@ Entering state 4 Stack now 0 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 4 2 +Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Stack now 0 4 Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token Next token is token '*' () Error: discarding token '*' () Error: popping token error () @@ -147768,13 +153276,25 @@ Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 8 20 4 Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Error: discarding token number (2) Error: popping token error () Stack now 0 8 20 4 Shifting token error () @@ -147793,7 +153313,7 @@ Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () @@ -147801,47 +153321,28 @@ -> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 7 (line 90): +Reducing stack by rule 6 (line 80): $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 Next token is token '\n' () @@ -147849,7 +153350,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -147867,35 +153368,8 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -stderr: -stdout: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -524. 
calc.at:1432: ok -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -stderr: - | - | +1 -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1440: "$PERL" -pi -e 'use strict; +./calc.at:1458: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147905,24 +153379,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1426: cat stderr -./calc.at:1449: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh +566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1491: mv calc.y.tmp calc.y -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -147932,154 +153391,195 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 120): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp 
(1.1-14: 1111) +Next token is token '\n' (1.17-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: cat stderr -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -stderr: -stderr: -stderr: +./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: input: - -syntax error -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 - | 1 + 2 * 3 + !+ ++ -./calc.at:1468: cat stderr - | (1 + #) = 1111 +input: + | error +./calc.at:1476: cat stderr + | 1 + 2 * 3 + !- ++ +input: ./calc.at:1454: $PREPARSER ./calc input -2.1: syntax error -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1459: cat stderr -stderr: +./calc.at:1451: $PREPARSER ./calc input + | 1//2 + | (1 + #) = 1111 +./calc.at:1476: cat stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1479: $PREPARSER ./calc input +input: input: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -148090,6 +153590,83 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr + | 1 + 2 * 3 + !- ++ + | (* *) + (*) + (*) +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1440: cat stderr +./calc.at:1477: $PREPARSER ./calc input +stderr: +452. types.at:139: stderr: + ok +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: cat stderr +1.1: syntax error +1.6: syntax error: invalid character: '#' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +input: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: cat stderr +input: +./calc.at:1448: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1438: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1469: cat stderr +input: +stderr: +stderr: +stderr: + | 1//2 +stderr: +./calc.at:1476: $PREPARSER ./calc input + | (- *) + (1 2) = 1 + | (#) + (#) = 2222 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1476: $PREPARSER ./calc input +stderr: +./calc.at:1458: $PREPARSER ./calc input +stderr: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: $PREPARSER ./calc /dev/null +stderr: +stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -148099,19 +153676,23 @@ Entering state 4 Stack now 0 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 4 2 +Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Stack now 0 4 Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token Next token is token '*' () Error: discarding token '*' () Error: popping token error () @@ -148142,13 +153723,25 @@ Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 8 20 4 Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Error: discarding token number (2) Error: popping token error () Stack now 0 8 20 4 Shifting token error () @@ -148167,7 +153760,7 @@ Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () @@ -148175,47 +153768,28 @@ -> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 7 (line 90): +Reducing stack by rule 6 (line 80): $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 Next token is token '\n' () @@ -148223,7 +153797,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -148241,27 +153815,16 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -549. calc.at:1477: testing Calculator glr2.cc %locations ... -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1449: $PREPARSER ./calc input +syntax error, unexpected '/', expecting number or '-' or '(' or '!' stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; +1.2: syntax error +1.10: syntax error +1.16: syntax error +1.1: syntax error +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -148271,198 +153834,134 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | (1 + #) = 1111 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: $PREPARSER ./calc input -stderr: -2.1: syntax error input: input: +./calc.at:1449: cat stderr input: -./calc.at:1477: mv calc.y.tmp calc.y - -stderr: -./calc.at:1440: cat stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -stderr: - | error -./calc.at:1459: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | (* *) + (*) + (*) -./calc.at:1441: cat stderr -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1457: cat stderr -stderr: -stderr: -stderr: -./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1453: cat stderr -syntax error -./calc.at:1458: cat stderr -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1443: cat stderr input: -./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr - stderr: +input: + | 1 + 2 * 3 + !+ ++ stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1435: cat stderr + | (#) + (#) = 2222 +./calc.at:1440: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1441: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 
0 8 18 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1446: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1454: cat stderr +stderr: +stderr: +stderr: +1.1: syntax error +stderr: +syntax error +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' syntax error syntax error syntax error +error: 2222 != 1 +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1457: cat stderr +567. calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1492: mv calc.y.tmp calc.y + +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + # + 1) = 1111 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -148472,151 +153971,116 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1457: $PREPARSER ./calc input -syntax error -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +./calc.at:1455: cat stderr +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +stderr: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1449: $PREPARSER ./calc input +stderr: + | 1//2 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1443: $PREPARSER ./calc input +stderr: stderr: -input: -./calc.at:1458: $PREPARSER ./calc input stderr: -./calc.at:1455: cat stderr stderr: - | 1 + 2 * 3 + !- ++ -input: -syntax error -syntax error -syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error 
(1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 - | 1 + 2 * 3 + !+ ++ -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1441: $PREPARSER ./calc input -stderr: -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1445: cat stderr -stderr: -./calc.at:1435: cat stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) 
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1476: cat stderr -stderr: -stderr: -input: -./calc.at:1459: cat stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1448: cat stderr -input: +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 @@ -148626,96 +154090,104 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) 
+Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -148732,175 +154204,190 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1//2 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg - | 1 2 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1449: $PREPARSER ./calc input - | (# + 1) = 1111 -' expout || exit 77 -input: -./calc.at:1435: $PREPARSER ./calc input +stdout: +./calc.at:1478: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: $PREPARSER ./calc /dev/null - | 1 + 2 * 3 + !+ ++ +stdout: +syntax error +./calc.at:1480: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +1.1: syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp 
(1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) input: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: cat stderr +./calc.at:1468: cat stderr input: -550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span} ... -./calc.at:1478: mv calc.y.tmp calc.y - -stderr: -./calc.at:1445: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1476: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr + | (#) + (#) = 2222 +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 + | (#) + (#) = 2222 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1435: $PREPARSER ./calc input stderr: stderr: +input: +input: stderr: -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 stderr: -1.3: syntax error + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1480: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -148909,7 +154396,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -148921,21 +154408,6 @@ Stack now 0 8 22 Reading a token Next token is token '/' (1.3: ) -LAC: initial context established for '/' -LAC: checking lookahead '/': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
Error: popping token '/' (1.2: ) Stack now 0 8 @@ -148943,9 +154415,12 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1477: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1478: $PREPARSER ./calc input + | (#) + (#) = 2222 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -148955,16 +154430,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1459: $PREPARSER ./calc input stderr: 1.2: syntax error: invalid character: '#' -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.8: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1457: $PREPARSER ./calc input +syntax error +syntax error +error: 2222 != 1 stderr: Starting parse Entering state 0 @@ -148975,96 +154451,104 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 
105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -149081,37 +154565,31 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: stderr: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: stderr: -syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp 
(1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '/' (1.3: ) -LAC: initial context established for '/' -LAC: checking lookahead '/': Err +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err LAC: checking lookahead end of file: Err LAC: checking lookahead number: S1 LAC: checking lookahead '=': Err @@ -149125,232 +154603,292 @@ LAC: checking lookahead '(': S4 LAC: checking lookahead ')': Err LAC: checking lookahead '!': S5 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 119): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +LAC: initial context discarded due to error recovery +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10-12: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +LAC: initial context established for '=' +LAC: 
checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Reducing stack by rule 5 (line 92): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 20 5 14 Reducing stack by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +stdout: +./calc.at:1477: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +./calc.at:1478: cat stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr input: -./calc.at:1457: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1458: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -1.3: syntax error -./calc.at:1453: $PREPARSER ./calc input +./calc.at:1459: $PREPARSER ./calc input +stderr: + | (* *) + (*) + (*) stderr: +./calc.at:1468: $PREPARSER ./calc input stderr: +./calc.at:1445: cat stderr stderr: -./calc.at:1468: cat stderr -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.2: syntax error: invalid character: '#' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149360,14 +154898,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -input: -./calc.at:1438: $PREPARSER ./calc input - | (* *) + (*) + (*) -1.1: syntax error -stderr: -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1454: "$PERL" -pi -e 'use strict; +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149377,149 +154908,87 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1458: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '+' () -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Stack now 0 8 20 29 -Next 
token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 input: input: -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1492: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS | 1 = 2 = 3 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1458: $PREPARSER ./calc input stderr: -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1477: $PREPARSER ./calc input +input: +./calc.at:1451: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 2 +./calc.at:1480: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1438: cat stderr +./calc.at:1441: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1441: cat stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1426: cat stderr -stderr: -stderr: -./calc.at:1455: cat stderr -stderr: -stderr: -./calc.at:1440: cat stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -149529,102 +154998,217 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 119): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.1-6: ) +LAC: initial context discarded due to error recovery +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +LAC: initial context discarded 
due to shift Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 92): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: cat stderr -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error syntax error +syntax error +input: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +' expout || exit 77 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1448: "$PERL" -pi -e 'use strict; +input: +input: +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149634,115 +155218,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1454: cat stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) + | (* *) + (*) + (*) + | (# + 1) = 1111 stderr: +./calc.at:1478: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1458: $PREPARSER ./calc input stderr: - | 1 + 2 * 3 + !- ++ -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -input: +1.7: syntax error stderr: -./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing 
stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | error -551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span} ... -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -149753,18 +155250,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: mv calc.y.tmp calc.y - -./calc.at:1449: cat stderr -syntax error -input: -input: -input: -./calc.at:1445: $PREPARSER ./calc input -input: -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1459: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149774,468 +155261,278 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1448: cat stderr - | (#) + (#) = 2222 -./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | 1 + 2 * 3 + !+ ++ - | (1 + 1) / (1 - 1) -./calc.at:1440: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) stderr: -./calc.at:1426: $PREPARSER ./calc input +syntax error, unexpected number +stderr: +stderr: +./calc.at:1445: $PREPARSER ./calc input +stdout: stderr: -./calc.at:1454: $PREPARSER ./calc input -input: stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 
Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 13 Stack now 0 8 20 5 13 Reducing stack by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1441: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1453: cat stderr +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -input: stderr: -./calc.at:1457: cat stderr +./calc.at:1482: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1435: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 +syntax error +syntax error +syntax error 1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | 1//2 -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1476: cat stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: $PREPARSER ./calc input - | (#) + (#) = 2222 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 
19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Stack now 0 8 20 4 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 4 12 20 29 +Stack now 0 8 20 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1458: cat stderr -./calc.at:1438: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: +./calc.at:1469: cat stderr +1.7: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1477: cat stderr +./calc.at:1479: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150245,407 +155542,457 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: cat stderr +input: + | 1 2 +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1478: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + | 1 2 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1457: cat stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1448: cat stderr stderr: -1.3: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1476: cat stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1438: $PREPARSER ./calc input stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1440: cat stderr stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +input: +1.3: syntax error +./calc.at:1449: cat stderr stderr: - | 1 + 2 * 3 + !+ ++ +./calc.at:1454: cat stderr +./calc.at:1435: cat stderr +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: 1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1438: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 
0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1457: $PREPARSER ./calc input +./calc.at:1459: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 13 Stack now 0 8 20 5 13 Reducing stack by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.10: syntax error +1.16: syntax error +syntax error, unexpected number input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1455: cat stderr +1.6: syntax error: invalid character: '#' + | error input: - | (- *) + (1 2) = 1 -1.3: syntax error +input: +./calc.at:1446: cat stderr +./calc.at:1476: cat stderr + | (1 + #) = 1111 +./calc.at:1457: $PREPARSER ./calc input stderr: +input: + | 1 + 2 * 3 + !+ ++ stderr: + | error + | error +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input +input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 4 12 20 29 +Stack now 0 8 20 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 -Stack now 0 8 22 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: 
) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -150662,85 +156009,54 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | - | +1 -./calc.at:1468: cat stderr -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: $PREPARSER ./calc input stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1469: $PREPARSER ./calc input stderr: +./calc.at:1477: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1455: cat stderr -./calc.at:1476: $PREPARSER ./calc input -input: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !- ++ -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1482: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + #) = 1111 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected invalid token input: -./calc.at:1426: $PREPARSER ./calc input stderr: +./calc.at:1448: $PREPARSER ./calc input stderr: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.3: syntax error +input: stderr: - | (1 + # + 1) = 1111 -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 input: -./calc.at:1454: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 syntax error -./calc.at:1453: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1459: cat stderr +1.3: syntax error ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150750,14 +156066,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: $PREPARSER ./calc input - | - | +1 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1448: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150767,171 +156078,88 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !+ ++ +input: +stderr: +stderr: +./calc.at:1449: $PREPARSER ./calc input +stderr: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +stderr: +syntax error ' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; + | (1 + #) = 1111 +stderr: + | (* *) + (*) + (*) +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 = 2 = 3 +./calc.at:1446: $PREPARSER ./calc input my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: cat stderr - | 1 = 2 = 3 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1458: cat stderr +1.2: syntax error: invalid character: '#' input: -./calc.at:1455: $PREPARSER ./calc input +stderr: input: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ +input: + | (1 + #) = 1111 +./calc.at:1440: $PREPARSER ./calc input +syntax error, unexpected invalid token +syntax error + | (1 + #) = 1111 +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1441: cat stderr +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr 1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token 
'=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' + | (* *) + (*) + (*) stderr: stderr: -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1440: cat stderr -syntax error -./calc.at:1453: cat stderr -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: -./calc.at:1435: cat stderr - | (1 + #) = 1111 -./calc.at:1454: $PREPARSER ./calc input +syntax error +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1451: cat stderr stderr: -./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: -./calc.at:1441: cat stderr Starting parse Entering state 0 Stack now 0 @@ -150940,7 +156168,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -150955,23 +156183,13 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '=' (1.7: ) -LAC: checking lookahead '=': Err -LAC: checking lookahead end of file: R6 G8 Err -LAC: checking lookahead number: R6 G8 Err -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -LAC: checking lookahead NEG: R6 G8 Err -LAC: checking lookahead '^': S23 1.7: syntax error, unexpected '=' Error: popping nterm exp (1.5: 2) Stack now 0 8 18 @@ -150981,735 +156199,997 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1448: cat stderr -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) 
Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (7) +Shifting token "number" (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 
1111) +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (5) +Shifting token "number" (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Reducing stack 0 by 
rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 8 18 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token 
end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: -stderr: -stderr: - | error -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1445: cat stderr -./calc.at:1457: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = 
nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 -Stack now 0 8 18 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '=' (1.7: ) -LAC: checking lookahead '=': Err -LAC: checking lookahead end of file: R6 G8 Err -LAC: checking lookahead number: R6 G8 Err -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -LAC: checking lookahead NEG: R6 G8 Err -LAC: checking lookahead '^': S23 -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -input: -1.6: syntax error: invalid character: '#' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1435: $PREPARSER ./calc input - | (1 + #) = 1111 -stderr: -./calc.at:1449: $PREPARSER ./calc input -input: -530. calc.at:1440: ok -./calc.at:1453: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1458: cat stderr - | (!!) 
+ (1 2) = 1 -1.11-17: error: null divisor -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -input: -1.11: syntax error -1.1-16: error: 2222 != 1 -531. calc.at:1441: 1.1: syntax error - ok -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -input: -./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (* *) + (*) + (*) -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | (#) + (#) = 2222 -stderr: -./calc.at:1445: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - -1.11: syntax error -1.1-16: error: 2222 != 1 -1.1: syntax error -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./calc.at:1455: cat stderr -stdout: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -./calc.at:1454: cat stderr -./calc.at:1438: cat stderr -stderr: -input: -./types.at:139: $PREPARSER ./test -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1443: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - - | 1 + 2 * 3 + !- ++ -./calc.at:1459: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting 
token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (256) +Shifting token "number" (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () +Reducing stack 0 by rule 12 (line 
103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 +-> $$ = nterm exp (4) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token Next token is token '=' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -input: -./calc.at:1426: cat stderr -./calc.at:1457: cat stderr - | - | +1 - -stderr: -./calc.at:1476: $PREPARSER ./calc /dev/null -stderr: -stderr: -stderr: -./calc.at:1455: $PREPARSER ./calc input -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.6: syntax error: invalid character: '#' -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: -./calc.at:1435: cat stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1468: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -LAC: checking lookahead end of file: S16 -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1454: $PREPARSER ./calc input -stderr: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -input: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -input: - | (#) + (#) = 2222 -527. 
calc.at:1435: ok -stderr: -stderr: -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1468: $PREPARSER ./calc /dev/null -stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 20 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.1-8: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 Reducing 
stack by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1443: $PREPARSER ./calc input - | (#) + (#) = 2222 -======== Testing with C++ standard flags: '' -./calc.at:1457: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -LAC: checking lookahead end of file: S16 -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 +stderr: +syntax error +syntax error +syntax error +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -151734,107 +157214,65 @@ Entering state 20 Stack now 0 4 12 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a 
token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -151851,11 +157289,15 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1459: cat stderr - -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: cat stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151865,24 +157307,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: cat stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: cat stderr +1.6: syntax error: invalid character: '#' stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -552. calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose ... stderr: -553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose ... -syntax error -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: input: stderr: +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -./calc.at:1453: cat stderr +input: + | (# + 1) = 1111 +1.2: syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ + | (#) + (#) = 2222 Starting parse Entering state 0 Stack now 0 @@ -151892,1250 +157346,1234 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 8 22 4 +Stack now 0 8 
20 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 +Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 +Next token is token '+' (1.13: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R7 G8 S24 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1479: mv calc.y.tmp calc.y - -./calc.at:1479: mv calc.y.tmp calc.y - -syntax error: invalid character: '#' -syntax error: invalid character: '#' syntax error -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: - | (#) + (#) = 2222 -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (- *) + (1 2) = 1 -./calc.at:1448: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1449: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -1.2: syntax error: invalid character: '#' -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -stderr: -stderr: -stderr: -1.7: syntax error syntax error -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: cat stderr +syntax error +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1478: cat stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose ... 
-input: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 
-Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 6 8 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 6 8 20 29 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 10 -Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token 
'-' (2.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 2 10 23 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 2 10 23 1 
-Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token "number" (1) 
+Shifting token "number" (1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = 
nterm input (1.1-6.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = 
token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (9.9: 3) 
-Shifting token number (9.9: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token '(' (10.5: ) 
-Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 8 19 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 
8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) +Next tokenstderr: + | + | +1 + is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = 
nterm exp (256) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 4 12 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = 
nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Stack now 0 6 8 18 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) - | (# + 1) = 1111 -./calc.at:1454: "$PERL" -pi -e 'use strict; 
+Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +input: +stderr: +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1477: $PREPARSER ./calc input ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +stderr: ' expout || exit 77 -./calc.at:1480: mv calc.y.tmp calc.y - -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +stderr: +input: +stderr: + | 1 2 +./calc.at:1482: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ +input: +1.2: syntax error: invalid character: '#' ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -153146,72 +158584,346 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1438: cat stderr -stderr: -1.7: syntax error +./calc.at:1445: cat stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: cat stderr +./calc.at:1459: cat stderr +./calc.at:1443: cat stderr +./calc.at:1451: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stderr: -./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1480: cat stderr +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking 
lookahead '!': S5 +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '+' (1.13: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R7 G8 S24 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +2.1: syntax error +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 Stack now 0 8 20 29 +Reading a token Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 Next token is token '=' (1.11: ) @@ -153219,31 +158931,31 @@ Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering 
state 7 Stack now 0 7 @@ -153253,1032 +158965,236 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: cat stderr +stderr: +input: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1438: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 + | 1 = 2 = 3 +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +input: +input: + | 1//2 +stderr: +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +stderr: +1.2: syntax error: invalid character: '#' + | (1 + # + 1) = 1111 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) 
-Shifting token '=' (2.12: ) +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 +Stack now 0 8 18 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 6 8 18 2 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' 
(4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next 
token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 
6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next 
token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) 
-Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 
0 6 8 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1457: cat stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of file (1.1: ) -LAC: initial context established for end of file -LAC: checking lookahead end of file: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1454: cat stderr +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !- ++ +2.1: syntax error +input: stderr: stderr: -Starting parse -Entering state 0 -Stack now 0 
-Reading a token -Next token is token end of file (1.1: ) -LAC: initial context established for end of file -LAC: checking lookahead end of file: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -529. calc.at:1438: ok -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: cat stderr ./calc.at:1476: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1449: $PREPARSER ./calc input ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1459: cat stderr -./calc.at:1458: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' - | (1 + #) = 1111 -input: +./calc.at:1435: cat stderr +stderr: + | (# + 1) = 1111 +stderr: + | 1//2 ./calc.at:1457: $PREPARSER ./calc input -input: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1443: $PREPARSER ./calc input ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -154289,37 +159205,95 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1448: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping 
nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1448: cat stderr +stderr: +input: +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +input: stderr: stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1477: $PREPARSER ./calc input input: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1479: $PREPARSER ./calc input stderr: -./calc.at:1449: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1454: $PREPARSER ./calc input -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -input: -syntax error: invalid character: '#' - | (1 + #) = 1111 +1.3: syntax error stderr: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 + | 1 + 2 * 3 + !- ++ +./calc.at:1468: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -154334,38 +159308,53 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1468: cat stderr -./calc.at:1455: cat stderr -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1426: cat stderr -stderr: 1.6: syntax error: invalid character: '#' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | - | +1 -syntax error: invalid character: '#' -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1453: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1469: cat stderr +syntax error, unexpected '=' +./calc.at:1453: 
$PREPARSER ./calc input +./calc.at:1440: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - -1.6: syntax error: invalid character: '#' my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" @@ -154373,172 +159362,268 @@ }eg ' expout || exit 77 stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1448: cat stderr -1.6: syntax error: invalid character: '#' -2.1: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) stderr: -./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -input: -input: +1.3: syntax error +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 4 12 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) Stack now 0 4 12 -Error: popping nterm exp (1) +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is 
token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1455: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 - | (1 + #) = 1111 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +input: ./calc.at:1458: cat stderr -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1453: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (* *) + (*) + (*) -./calc.at:1457: cat stderr +input: +input: +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 + | (!!) + (1 2) = 1 ./calc.at:1448: $PREPARSER ./calc input -2.1: syntax error + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1454: cat stderr +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1435: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1455: cat stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1446: $PREPARSER ./calc input input: -stdout: -./calc.at:1454: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +./calc.at:1440: $PREPARSER ./calc input +input: + | 1 = 2 = 3 +./calc.at:1443: cat stderr +./calc.at:1469: $PREPARSER ./calc input +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -154548,24 +159633,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -./types.at:139: ./check -./calc.at:1458: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -1.6: syntax error: invalid character: '#' -1.2: syntax error -1.10: syntax error -1.16: syntax error +stderr: +1.3: syntax error +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 ./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -154576,561 +159651,350 @@ }eg ' expout || exit 77 stderr: -./calc.at:1479: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 4 12 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) Stack now 0 4 12 -Error: popping nterm exp (1) +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 
Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.3: syntax error +./calc.at:1477: cat stderr +1.2: syntax error: invalid character: '#' +stderr: +stderr: +stderr: +syntax error, unexpected '=' +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 4 12 20 29 +Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 4 12 20 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token ')' (1.18: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: 
checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -LAC: initial context discarded due to shift +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 20 4 +Stack now 0 4 Reading a token -Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.23: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23: ) +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.25: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23-25: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.27: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23-27: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 92): - $1 = token 
number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': R9 G12 S21 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -LAC: initial context discarded due to shift -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -LAC: initial context discarded due to shift +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token 
'=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: stderr: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1441: cat stderr stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1459: cat stderr +input: | (1 + # + 1) = 1111 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1443: cat stderr - | (# + 1) = 1111 -./calc.at:1457: $PREPARSER ./calc input -syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1476: cat stderr stderr: -./calc.at:1454: cat stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +stderr: + | (# + 1) = 1111 + | + | +1 +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1455: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -155140,450 +160004,142 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token 
'+' (1.17: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.25: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23-25: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.27: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23-27: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is 
token '*' (1.39: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': R9 G12 S21 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -LAC: initial context discarded due to shift -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -LAC: initial context discarded due to shift +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is 
token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.6: syntax error: invalid character: '#' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -syntax error: invalid character: '#' input: -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1459: cat stderr - | 1//2 -./calc.at:1443: $PREPARSER ./calc input -555. calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose ... -./calc.at:1480: mv calc.y.tmp calc.y - +1.11: syntax error +1.1-16: error: 2222 != 1 + | (#) + (#) = 2222 +stderr: + | (1 + #) = 1111 +1.2: syntax error: invalid character: '#' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +stderr: +./calc.at:1482: cat stderr +./calc.at:1441: $PREPARSER ./calc input +stderr: +./calc.at:1477: $PREPARSER ./calc input +stderr: 1.6: syntax error: invalid character: '#' stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1445: "$PERL" -pi -e 'use strict; +syntax error +stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +1.2: syntax error: invalid character: '#' +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155593,41 +160149,117 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1480: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token number (1.1: 1) Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -input: -./calc.at:1455: "$PERL" -pi -e 'use strict; +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 121): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1449: cat stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155637,7 +160269,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: "$PERL" -pi -e 'use strict; +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155647,49 +160285,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (# + 1) = 1111 -./calc.at:1459: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1476: cat stderr -./calc.at:1445: cat stderr -1.11-17: error: null divisor -./calc.at:1448: cat stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: cat stderr -stderr: -./calc.at:1458: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -1.2: syntax error: invalid character: '#' -./calc.at:1453: "$PERL" -pi -e 'use strict; +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155699,145 +160295,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: cat stderr -stderr: -input: -1.11-17: error: null divisor - | 1 + 2 * 3 + !+ ++ -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1468: cat stderr -input: -./calc.at:1449: $PREPARSER ./calc /dev/null -./calc.at:1453: cat stderr -input: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 - | (# + 1) = 1111 -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1426: cat stderr -stderr: -stderr: -input: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -input: - | (1 + #) = 1111 -stderr: -stderr: -./calc.at:1458: $PREPARSER ./calc input -input: -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end 
of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: - | (!!) + (1 2) = 1 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1454: "$PERL" -pi -e 'use strict; +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155847,39 +160324,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1457: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1468: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1455: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr -syntax error -error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.11-17: error: null divisor -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1437: cat stderr stderr: - | (# + 1) = 1111 -./calc.at:1459: cat stderr -1.6: syntax error: invalid character: '#' -syntax error -error: 2222 != 1 -./calc.at:1454: cat stderr stderr: -./calc.at:1443: cat stderr -./calc.at:1426: $PREPARSER ./calc input + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !- ++ stderr: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +./calc.at:1445: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1468: cat stderr stderr: -1.1: syntax error Starting parse Entering state 0 Stack now 0 @@ -155889,493 +160360,242 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 120): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -LAC: initial context discarded due to shift +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number 
(1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token 
'=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -1.11-17: error: null divisor - | (1 + # + 1) = 1111 -syntax error -error: 2222 != 1 -./calc.at:1457: $PREPARSER ./calc input -input: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -input: -syntax error -error: 2222 != 1 - | 1 + 2 * 3 + !- ++ -stderr: - | error - | (1 + # + 1) = 1111 -./calc.at:1459: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1443: $PREPARSER ./calc input +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1478: cat stderr +1.2: syntax error: invalid character: '#' stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1451: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 120): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.14: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -LAC: initial context discarded due to shift -Entering state 18 -Stack now 0 8 18 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 92): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering 
state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '+' (1.11: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -539. calc.at:1454: ok -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 121): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' 1.6: syntax error: invalid character: '#' stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1485: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1477: cat stderr +./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -./calc.at:1453: cat stderr -syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -156385,46 +160605,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: "$PERL" -pi -e 'use strict; +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1449: cat stderr -stderr: -./calc.at:1458: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1445: cat stderr -input: -input: -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: cat stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -156434,23 +160631,125 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +./calc.at:1478: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: cat stderr -./calc.at:1476: cat stderr +./calc.at:1438: cat stderr +./calc.at:1453: cat stderr +./calc.at:1480: cat stderr +input: + | (#) + (#) = 2222 ./calc.at:1449: $PREPARSER ./calc input -./calc.at:1468: cat stderr - | (# + 1) = 1111 -./calc.at:1448: "$PERL" -pi -e 'use strict; + | 1//2 +stderr: +./calc.at:1482: $PREPARSER ./calc input +stderr: +stderr: +1.11-17: error: null divisor +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +444. types.at:139: ok +input: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1485: $PREPARSER ./calc input + +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -156460,62 +160759,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1459: cat stderr -538. 
calc.at:1453: ok -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1457: cat stderr -./calc.at:1443: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1455: $PREPARSER ./calc input -stderr: - -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -input: -input: -input: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -./calc.at:1426: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1445: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 - | (- *) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' + | (#) + (#) = 2222 ./calc.at:1468: $PREPARSER ./calc input -input: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: cat stderr stderr: - | 1 = 2 = 3 - | (1 + 1) / (1 - 1) -./calc.at:1457: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 stderr: -./calc.at:1443: $PREPARSER ./calc input stderr: stderr: -./calc.at:1459: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () syntax error -error: 2222 != 1 -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -156525,1978 +160797,1233 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 119): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 
= token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -LAC: initial context discarded due to shift +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error -syntax error -error: 2222 != 1 +1.1: syntax error +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +568. calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1492: mv calc.y.tmp calc.y + +./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +input: +input: +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1458: cat stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: cat stderr +input: +input: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1454: cat stderr +./calc.at:1446: cat stderr +input: +input: +input: + | (#) + (#) = 2222 +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: $PREPARSER ./calc input + | error + | error + | (1 + 1) / (1 - 1) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1438: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ + | (1 + # + 1) = 1111 +stderr: + | error +./calc.at:1455: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1457: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Stack now 0 4 12 -Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing 
stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () - | (1 + # + 1) = 1111 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -1.11-17: error: null divisor -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 
-Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -syntax error -syntax error -error: 2222 != 1 - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1458: cat stderr - | (#) + (#) = 2222 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1448: $PREPARSER ./calc input -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 4 2 Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 119): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.12: 2) -Error: popping token error 
(1.10-12: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '=' (1.15: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file 
(2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Stack now 0 4 12 -Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Entering state 10 Reading a token Next token is token ')' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by 
rule 4 (line 75): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -syntax error: invalid character: '#' -syntax error -syntax error -error: 2222 != 1 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -input: -stderr: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1458: $PREPARSER ./calc input -1.11-17: error: null divisor -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error: invalid character: '#' -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1449: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -./calc.at:1459: cat stderr -./calc.at:1443: cat stderr -stderr: -./calc.at:1455: cat stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -556. calc.at:1482: testing Calculator C++ %glr-parser %debug ... -1.6: syntax error: invalid character: '#' -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1482: mv calc.y.tmp calc.y - -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - | (!!) + (1 2) = 1 - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -541. calc.at:1457: ok -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1476: cat stderr -543. calc.at:1459: ok -stderr: - | (* *) + (*) + (*) -input: -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1468: cat stderr - | - | +1 -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: cat stderr -./calc.at:1445: cat stderr -557. calc.at:1482: testing Calculator glr2.cc %debug ... 
-./calc.at:1448: cat stderr -stderr: -input: -./calc.at:1482: mv calc.y.tmp calc.y - -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 - | (* *) + (*) + (*) -input: -stderr: -./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1426: cat stderr -input: -stderr: -./calc.at:1476: $PREPARSER ./calc input - | (* *) + (*) + (*) - | (1 + 1) / (1 - 1) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1458: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R7 G8 S24 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: - -stderr: -stderr: - - | (1 + 1) / (1 - 1) -stderr: -1.11-17: error: null divisor -stderr: -./calc.at:1445: $PREPARSER ./calc input -input: -stdout: -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () 
Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 - | (1 + #) = 1111 -stderr: -syntax error -syntax error -syntax error -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./types.at:139: $PREPARSER ./test -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -error: null divisor -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (4) +Shifting token number (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R7 G8 S24 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -1.11-17: error: null divisor -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack 0 
by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 22 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 22 4 12 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 8 22 4 12 19 28 Reading a token Next token is token ')' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 8 22 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = nterm exp (0) + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Stack now 0 8 22 31 +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack by rule 10 (line 93): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor + $2 = token '=' () + $3 = nterm exp (2) -> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm 
line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -syntax error -syntax error -syntax error -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -error: null divisor -./calc.at:1443: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: $PREPARSER ./calc /dev/null -======== Testing with C++ standard flags: '' -./calc.at:1449: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1458: cat stderr -./calc.at:1455: cat stderr -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Stack now 0 4 12 20 29 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token ')' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 22 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) 
+Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 22 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Stack now 0 8 22 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = nterm exp (0) + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Stack now 0 8 22 31 +-> $$ = nterm exp (4) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -542. 
calc.at:1458: ok -stdout: - | (- *) + (1 2) = 1 +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: cat stderr +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: stderr: -./calc.at:1449: $PREPARSER ./calc input Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1451: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1476: cat stderr -stderr: -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose ... -559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose ... -./calc.at:1485: mv calc.y.tmp calc.y - -./calc.at:1448: cat stderr -input: -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: cat stderr -./calc.at:1485: mv calc.y.tmp calc.y - -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1476: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1451: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () stderr: -input: +1.11-17: error: null divisor +1.1: syntax error stderr: - -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1443: cat stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: cat stderr +syntax error: invalid character: '#' stderr: stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.11-17: error: null divisor +syntax error, unexpected invalid token +./calc.at:1446: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -input: +1.1: syntax error stderr: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack 
now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 121): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: cat stderr - | (# + 1) = 1111 -./calc.at:1448: $PREPARSER ./calc input -522. calc.at:1426: ok -stderr: -stderr: -input: -./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) Starting parse Entering state 0 Stack now 0 @@ -158570,457 +162097,909 @@ Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 121): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 122): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - | 1 + 2 * 3 + !- ++ stderr: -./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.1: syntax error 1.2: syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -533. calc.at:1445: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - ok -./calc.at:1443: $PREPARSER ./calc input +1.8: syntax error: invalid character: '#' +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1476: $PREPARSER ./calc input input: - | 1 + 2 * 3 + !- ++ -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1449: cat stderr -stderr: -stderr: -./calc.at:1482: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS - | 1 2 +./calc.at:1476: cat stderr input: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: cat stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !- ++ -./calc.at:1455: $PREPARSER ./calc input -stderr: -./calc.at:1451: $PREPARSER ./calc input -stderr: - -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr +./calc.at:1476: $PREPARSER ./calc input + | + | +1 + | (1 + # + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) + | (1 + # + 1) = 1111 +./calc.at:1454: $PREPARSER ./calc input stderr: -input: +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1458: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' 
Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 4 12 20 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token number (7) +Shifting token number (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) 
+Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 -Stack now 0 8 20 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '\n' () +Shifting token '\n' () +Entering 
state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp 
(-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 8 20 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line 
() +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' 
() +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting 
token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of inp1.3: syntax error -stderr: - | (* *) + (*) + (*) -./calc.at:1449: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp 
(1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 122): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -ut (2.1: ) -Shifting token end of input (2.1: ) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 +Reading a 
token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: - -./calc.at:1448: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -159030,14 +163009,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -560. calc.at:1486: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" %verbose ... 
+stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +./calc.at:1443: $PREPARSER ./calc /dev/null +stderr: +stderr: +1.6: syntax error: invalid character: '#' +stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.1: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +stderr: Starting parse Entering state 0 Stack now 0 @@ -159121,8 +163118,10 @@ Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: +syntax error, unexpected '+' +syntax error: invalid character: '#' +1.11-17: error: null divisor +1.11-17: error: null divisor Starting parse Entering state 0 Stack now 0 @@ -159132,298 +163131,195 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' 
(1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 20 4 12 20 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 +Stack now 0 4 12 20 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: 
syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -159433,32 +163329,23 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of inp1.2: syntax error -1.10: syntax error -1.16: syntax error -ut (2.1: ) +Next token is token end of input (2.1: ) Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: mv calc.y.tmp calc.y - -1.3: syntax error -./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1448: cat stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +syntax error, unexpected invalid token +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: cat stderr +./calc.at:1469: cat stderr +input: +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -159468,6 +163355,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1441: cat stderr +input: ./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -159478,23 +163369,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | + | +1 + | 1 2 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc input + | (1 + # + 1) = 1111 stderr: -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./calc.at:1476: cat stderr -./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./types.at:139: ./check -./calc.at:1455: cat stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1435: $PREPARSER ./calc input +stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1445: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -159504,8 +163401,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -159515,56 +163411,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1485: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1468: cat stderr -stdout: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1443: cat stderr -stdout: -input: -input: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $PREPARSER ./test - | (#) + (#) = 2222 - | (#) + (#) = 2222 stderr: -./calc.at:1451: cat stderr 
-./calc.at:1476: $PREPARSER ./calc input -stdout: -./calc.at:1449: cat stderr -./calc.at:1455: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' -./types.at:139: $PREPARSER ./test -input: -input: stderr: stderr: - | (#) + (#) = 2222 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1443: $PREPARSER ./calc input -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1468: $PREPARSER ./calc input -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 stderr: - | 1 + 2 * 3 + !+ ++ +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1449: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' +syntax error, unexpected '+' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +1.6: syntax error: invalid character: '#' stderr: +1.11-17: error: null divisor Starting parse Entering state 0 Stack now 0 @@ -159574,142 +163440,302 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 
8 20 4 11 -Next token is token invalid token (1.8: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-8: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 
6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' +syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) stderr: -562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose ... -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1455: cat stderr +543. calc.at:1459: ok +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1476: cat stderr + +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +' expout || exit 77 +./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +input: + | (#) + (#) = 2222 + | (# + 1) = 1111 +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1441: $PREPARSER ./calc input + | + | +1 +./calc.at:1469: $PREPARSER ./calc input stderr: -./calc.at:1451: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +stderr: +stdout: +./calc.at:1486: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: +./calc.at:1457: cat stderr +input: +./calc.at:1449: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1451: cat stderr +./calc.at:1455: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1476: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +syntax error +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +syntax error +input: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -159719,152 +163745,276 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.1-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 
0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr -stderr: -./calc.at:1487: mv calc.y.tmp calc.y - -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -561. calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose ... -./calc.at:1486: mv calc.y.tmp calc.y +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: $PREPARSER ./calc input +569. calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1494: mv calc.y.tmp calc.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1458: cat stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: cat stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: cat stderr +./calc.at:1437: cat stderr + | (1 + #) = 1111 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -======== Testing with C++ standard flags: '' -1.6: syntax error: invalid character: '#' stderr: Starting parse Entering state 0 @@ -160001,812 +164151,847 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: -stderr: -stderr: -./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' 
(1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 +Next token is token "number" (7) +Shifting token "number" (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) +Entering state 27 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 
105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: 
) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -syntax error: invalid character: '#' -1.3: syntax error -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1476: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error -./calc.at:1455: cat stderr -stderr: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: -input: -./calc.at:1443: cat stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1455: $PREPARSER ./calc input -stderr: - | (1 + #) = 1111 -./calc.at:1448: cat stderr -./calc.at:1468: cat stderr -./calc.at:1476: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ 
= nterm exp (1) +Entering state 10 Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: cat stderr -stderr: -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1443: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (1 + #) = 1111 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1486: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = 
nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 4 2 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = 
token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (4) +Shifting token "number" (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp 
(1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1448: $PREPARSER ./calc input -input: - | error -syntax error: invalid character: '#' -stderr: -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1449: cat stderr -stderr: -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -1.11-17: error: null divisor -stderr: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (#) + (#) = 2222 -1.1: syntax error -./calc.at:1449: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token "number" (256) +Shifting token "number" (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) +Entering state 27 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 
+Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) 
+Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.11-17: error: null divisor -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1455: cat stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -160815,9 +165000,6 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg -' expout || exit 77 -stderr: -1.1: syntax error ./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -160828,57 +165010,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg +./calc.at:1478: cat stderr ' expout || exit 77 -./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1443: cat stderr +./calc.at:1478: cat stderr +./calc.at:1482: cat stderr input: -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: +./calc.at:1477: cat stderr ./calc.at:1468: cat stderr +./calc.at:1480: cat stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + 1) / (1 - 1) +./calc.at:1457: $PREPARSER ./calc input +stderr: + | (1 + #) = 1111 +stderr: +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1451: $PREPARSER ./calc input +stderr: stderr: -./calc.at:1476: cat stderr Starting parse Entering state 0 Stack now 0 @@ -160894,34 +165047,25 @@ Entering state 11 Stack now 0 4 11 Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.2: ) Error: popping token error (1.1-2: ) Stack now 0 4 -LAC: initial context discarded due to error recovery Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err Error: discarding token '+' (1.4: ) Error: popping token error (1.1-2: ) Stack now 0 4 -LAC: initial context discarded due to error recovery Shifting token error (1.1-4: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token number (1.6: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err Error: discarding token number (1.6: 1) Error: popping token error (1.1-4: ) Stack now 0 4 -LAC: initial context discarded due to error recovery Shifting token error (1.1-6: ) Entering state 11 Stack now 0 4 11 @@ -160930,7 +165074,7 @@ Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.1-6: ) $3 = token ')' (1.7: ) @@ -160947,16 +165091,14 @@ Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) @@ -160965,321 +165107,1047 @@ Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering 
state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1448: cat stderr -input: -input: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1443: $PREPARSER ./calc input - | (# + 1) = 1111 -stderr: -./calc.at:1476: $PREPARSER ./calc input -input: -stdout: -stderr: -./calc.at:1451: cat stderr stderr: -535. calc.at:1448: ok -./calc.at:1449: cat stderr +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (7) +Shifting token "number" (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) 
+Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (5) +Shifting token "number" (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = 
nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' (1.4: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token number (1.6: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = 
nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 8 18 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) 
+Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (4) +Shifting token "number" (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 
91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (256) +Shifting token "number" (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering 
state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (64) +Shifting token "number" (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: - | (# + 1) = 1111 -./types.at:139: ./check -Starting parse +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +syntax error +542. calc.at:1458: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.10: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1468: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: +Cleanup: popping token end of input () +Cleanup: popping nterm input () + ok +528. calc.at:1437: ok + + +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: input: +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1440: cat stderr +./calc.at:1453: $PREPARSER ./calc input stderr: stderr: -syntax error: invalid character: '#' +./calc.at:1446: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1438: $PREPARSER ./calc input stderr: - | 1 = 2 = 3 -./calc.at:1451: $PREPARSER ./calc input -stdout: stderr: +error: null divisor +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -161289,179 +166157,151 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error (1.2-4: ) +LAC: initial context discarded due to error recovery +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token invalid token (1.8: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) Stack now 0 8 20 4 -Shifting token error (1.10: ) +LAC: initial context discarded due to error recovery +Shifting token error (1.1-8: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (1.11: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.17-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -stdout: -' expout || exit 77 -./types.at:139: ./check -stderr: -./types.at:139: ./check -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error +570. calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1494: mv calc.y.tmp calc.y + +571. calc.at:1504: testing Calculator lalr1.d ... 
+./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +input: +input: +input: +input: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +input: +./calc.at:1494: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 | (1 + #) = 1111 -./calc.at:1449: $PREPARSER ./calc input -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1455: cat stderr -1.7: syntax error -stderr: -1.6: syntax error: invalid character: '#' - ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -161472,62 +166312,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' + | 1 2 + | 1 = 2 = 3 ./calc.at:1443: cat stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: - | (1 + # + 1) = 1111 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1443: $PREPARSER ./calc input + | error + | (1 + #) = 1111 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1479: cat stderr +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1468: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1477: $PREPARSER ./calc input stderr: stderr: +1.6: syntax error: invalid character: '#' +stderr: +1.11-17: error: null divisor Starting parse Entering state 0 Stack now 0 @@ -161541,7 +166344,7 @@ Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 @@ -161562,34 +166365,25 @@ Entering state 11 Stack now 0 4 11 Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.6: ) Error: popping token error (1.2-6: ) Stack now 0 4 -LAC: initial context discarded due to error recovery Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token '+' (1.8: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err Error: discarding token '+' (1.8: ) Error: popping token error (1.2-6: ) Stack now 0 4 -LAC: initial context discarded due to error recovery Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token number (1.10: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err Error: discarding token number (1.10: 1) Error: popping token error (1.2-8: ) Stack now 0 4 -LAC: initial context discarded due to error recovery Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 @@ -161598,7 +166392,7 @@ Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -161615,16 +166409,14 @@ Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.15-18: 1111) -> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.19-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-11: 1111) $2 = token '=' (1.13: ) $3 = nterm exp (1.15-18: 1111) @@ -161633,203 +166425,70 @@ Stack now 0 8 Next token is token '\n' (1.19-2.0: ) Shifting token '\n' (1.19-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-18: 1111) $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next 
token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1485: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1446: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1449: cat stderr -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1451: cat stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: cat stderr +input: stderr: stderr: stderr: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stdout: -syntax error: invalid character: '#' +stderr: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1479: $PREPARSER ./calc /dev/null +571. 
calc.at:1504: stderr: +error: null divisor +stderr: +syntax error, unexpected '=' + | (1 + 1) / (1 - 1) +stderr: +1.6: syntax error: invalid character: '#' +stderr: +1.11-17: error: null divisor +./calc.at:1440: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -161843,7 +166502,7 @@ Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 @@ -161864,34 +166523,25 @@ Entering state 11 Stack now 0 4 11 Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.6: ) Error: popping token error (1.2-6: ) Stack now 0 4 -LAC: initial context discarded due to error recovery Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token '+' (1.8: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err Error: discarding token '+' (1.8: ) Error: popping token error (1.2-6: ) Stack now 0 4 -LAC: initial context discarded due to error recovery Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token number (1.10: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err Error: discarding token number (1.10: 1) Error: popping token error (1.2-8: ) Stack now 0 4 -LAC: initial context discarded due to error recovery Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 @@ -161900,7 +166550,7 @@ Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -161917,16 +166567,14 @@ Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.15-18: 1111) -> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.19-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-11: 1111) $2 = token '=' (1.13: ) $3 = nterm exp (1.15-18: 1111) @@ -161935,54 +166583,87 @@ Stack now 0 8 Next token is token '\n' (1.19-2.0: ) Shifting token '\n' (1.19-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-18: 1111) $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.7: syntax error 
+syntax error: invalid character: '#' +stderr: +./calc.at:1435: cat stderr +1.7: syntax error +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +1.6: syntax error: invalid character: '#' +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS input: -./calc.at:1446: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - - | - | +1 - | (# + 1) = 1111 -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1449: $PREPARSER ./calc input -563. calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... -input: -./calc.at:1489: mv calc.y.tmp calc.y - + skipped (calc.at:1504) stderr: - | (1 + # + 1) = 1111 -./calc.at:1468: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' +stderr: +stderr: +stderr: + | 1//2 +./calc.at:1485: $PREPARSER ./calc input +stderr: +syntax error, unexpected end of input +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: $PREPARSER ./calc input ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -161993,35 +166674,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -syntax error: invalid character: '#' stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1443: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -162031,96 +166696,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: $PREPARSER ./calc input stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./calc.at:1455: cat stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +1.11-17: error: null divisor +syntax error, unexpected '=' syntax error: invalid character: '#' -2.1: syntax error -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -162130,9 +166713,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1476: cat stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -162143,92 +166726,334 @@ }eg ' expout || exit 77 stderr: -input: - | (1 + 1) / (1 - 1) +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1476: $PREPARSER ./calc /dev/null +stderr: +1.7: syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 20 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 -Stack now 0 8 20 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token 
number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 8 20 29 21 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1455: $PREPARSER ./calc input -input: -./calc.at:1449: cat stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.6: syntax error: invalid character: '#' +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -162237,15 +167062,352 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +stderr: ' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1476: $PREPARSER ./calc input +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) input: + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' 
(1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: - | (1 + # + 1) = 1111 -./calc.at:1449: $PREPARSER ./calc input stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1435: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -162259,7 +167421,7 @@ Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 @@ -162274,16 +167436,14 @@ Shifting token number (1.6: 1) Entering state 1 Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.6: 1) -> $$ = nterm exp (1.6: 1) Entering state 29 Stack now 0 4 12 20 29 Reading a token Next token is token ')' (1.7: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R7 G12 S26 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -162292,10 +167452,9 @@ Stack now 0 4 12 Next token is token ')' (1.7: ) 
Shifting token ')' (1.7: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -162317,7 +167476,7 @@ Shifting token number (1.12: 1) Entering state 1 Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 @@ -162332,16 +167491,14 @@ Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 22 4 12 19 28 Reading a token Next token is token ')' (1.17: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -162350,10 +167507,9 @@ Stack now 0 8 22 4 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -162362,9 +167518,7 @@ Stack now 0 8 22 31 Reading a token Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R10 G8 S24 -Reducing stack by rule 10 (line 106): +Reducing stack by rule 10 (line 93): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -162374,1067 +167528,407 @@ Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -error: null divisor -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: cat stderr -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: cat stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +Starting parse +Entering state 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Stack now 0 8 18 27 +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +syntax error, unexpected end of input +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr +./calc.at:1469: cat stderr +1.11-17: error: null divisor +stderr: +./calc.at:1457: cat stderr +./calc.at:1445: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp 
(1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp 
(2.14-15: -5) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing 
stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 20 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Stack now 0 8 20 4 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 4 2 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering 
state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a 
token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 18 2 10 +Stack now 0 8 20 4 12 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' 
(10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 19 4 12 19 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) 
-Entering state 27 -Stack now 0 6 8 18 27 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 4 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 4 12 23 1 +Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a 
token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) 
-Shifting token end of input (14.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stderr: +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1451: $PREPARSER ./calc /dev/null -error: null divisor -1.6: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + +input: +./calc.at:1438: cat stderr + | (1 + # + 1) = 1111 + | (* *) + (*) + (*) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1441: $PREPARSER ./calc input stderr: stderr: Starting parse @@ -163450,7 +167944,7 @@ Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 @@ -163465,16 +167959,14 @@ Shifting token number (1.6: 1) Entering state 1 Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.6: 1) -> $$ = nterm exp (1.6: 1) Entering state 29 Stack now 0 4 12 20 29 Reading a token Next token is token ')' (1.7: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R7 G12 S26 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -163483,10 +167975,9 @@ Stack now 0 4 12 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -163508,7 +167999,7 @@ Shifting token number (1.12: 1) Entering state 1 Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 @@ -163523,16 +168014,14 @@ Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 22 4 12 19 28 Reading a token Next token is token ')' (1.17: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -163541,10 +168030,9 @@ Stack now 0 8 22 4 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -163553,9 +168041,7 @@ Stack now 0 8 22 31 Reading a token Next token is token '\n' (1.18-2.0: ) -LAC: initial 
context established for '\n' -LAC: checking lookahead '\n': R10 G8 S24 -Reducing stack by rule 10 (line 106): +Reducing stack by rule 10 (line 93): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -163565,1209 +168051,343 @@ Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: cat stderr +1.11-17: error: null divisor stderr: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 -Stack now 0 6 8 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 8 20 1 
+Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 
(line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 
79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) 
-Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: 
) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 19 4 12 19 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is 
token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Stack now 0 6 8 18 27 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 4 1 +Stack now 0 8 20 4 1 
Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 4 12 23 1 +Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) 
-Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.6: syntax error: invalid character: '#' -input: -input: - | (1 + 1) / (1 - 1) -1.1: syntax error -./calc.at:1468: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1443: $PREPARSER ./calc input -input: -stderr: -error: null divisor -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -1.1: syntax error -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -./calc.at:1455: cat stderr -error: null divisor -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -164784,28 +168404,21 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: 
discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -540. calc.at:1455: ./calc.at:1451: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1453: cat stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164815,11 +168428,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - ok -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: cat stderr -stderr: -./calc.at:1476: cat stderr Starting parse Entering state 0 Stack now 0 @@ -164829,104 +168437,96 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next 
token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -164943,43 +168543,103 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1469: $PREPARSER ./calc /dev/null +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +./calc.at:1477: cat stderr +./calc.at:1476: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +1.2: syntax error +1.10: syntax error +1.16: syntax error stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr +syntax error + | (1 + #) = 1111 input: - | (1 + 1) / (1 - 1) -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1451: cat stderr - -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +541. calc.at:1457: | (1 + #) = 1111 +stderr: + | (!!) + (1 2) = 1 + ok +1.11-17: error: null divisor +./calc.at:1451: $PREPARSER ./calc input +syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: $PREPARSER ./calc input +input: ./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -164990,28 +168650,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11-17: error: null divisor -546. calc.at:1476: ok -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1445: $PREPARSER ./calc input my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11-17: error: null divisor -./calc.at:1446: cat stderr - -./calc.at:1443: cat stderr -./calc.at:1468: cat stderr -input: -input: -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1478: cat stderr +./calc.at:1479: cat stderr +./calc.at:1480: cat stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165021,54 +168676,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | 1//2 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: $PREPARSER ./calc input -stderr: -544. calc.at:1468: stderr: - ok -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 - | (1 + #) = 1111 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: $PREPARSER ./calc input ./calc.at:1449: cat stderr -stderr: -stderr: -Starting parse +538. 
calc.at:1453: Starting parse Entering state 0 Stack now 0 Reading a token @@ -165109,48 +168718,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -165167,43 +168792,14 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (1 + 1) / (1 - 1) stderr: -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -536. 
calc.at:1449: ok +./calc.at:1486: cat stderr +./calc.at:1443: cat stderr + ok stderr: -564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... +stderr: +./calc.at:1438: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -165217,7 +168813,7 @@ Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 @@ -165238,9 +168834,12 @@ Entering state 11 Stack now 0 4 11 Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.6: ) Error: popping token error (1.2-6: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 @@ -165249,7 +168848,7 @@ Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -165266,14 +168865,16 @@ Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) @@ -165282,203 +168883,201 @@ Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1489: mv calc.y.tmp calc.y - -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1446: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -input: -./calc.at:1443: cat stderr - | error -./calc.at:1451: cat stderr -./calc.at:1446: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1443: $PREPARSER ./calc input stderr: -./calc.at:1491: mv calc.y.tmp calc.y - input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 - | (!!) + (1 2) = 1 -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: $PREPARSER ./calc input -stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 +Error: popping nterm exp (1) Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp 
(1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of input () +Cleanup: popping nterm input () 1.11: syntax error 1.1-16: error: 2222 != 1 + | + | +1 +./calc.at:1477: $PREPARSER ./calc input + +syntax error + +572. calc.at:1509: testing Calculator D ... +./calc.at:1509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +573. calc.at:1510: testing Calculator D %locations ... +input: +./calc.at:1468: cat stderr +input: ./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1446: cat stderr +./calc.at:1440: cat stderr +input: +./calc.at:1454: cat stderr +./calc.at:1478: cat stderr +./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1482: cat stderr + | + | +1 +574. calc.at:1512: testing Calculator D parse.error=detailed api.prefix={calc} %verbose ... +./calc.at:1480: $PREPARSER ./calc input +input: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 + | (# + 1) = 1111 + | (# + 1) = 1111 +./calc.at:1478: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1449: $PREPARSER ./calc input stderr: -./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 +stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +stderr: +./calc.at:1477: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1512: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 1.11: syntax error 1.1-16: error: 2222 != 1 +syntax error, unexpected '+' Starting parse Entering state 0 Stack now 0 @@ -165488,32 +169087,38 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.1-6: ) +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token @@ -165521,9 +169126,9 @@ Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 @@ -165538,14 +169143,16 @@ Shifting token 
number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) @@ -165554,49 +169161,42 @@ Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1479: $PREPARSER ./calc input ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1451: "$PERL" -pi -e 'use strict; +input: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165606,78 +169206,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1443: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1491: mv calc.y.tmp calc.y - -stderr: -./calc.at:1446: $PREPARSER ./calc input -stdout: -567. calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1492: mv calc.y.tmp calc.y - -./calc.at:1451: cat stderr -stderr: -./types.at:139: $PREPARSER ./test -input: -./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 input: - | (1 + # + 1) = 1111 + | (!!) + (1 2) = 1 ./calc.at:1443: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -stderr: -./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -165703,416 +169234,15 @@ Entering state 20 Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 
18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -======== Testing with C++ standard flags: '' -stderr: -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1446: cat stderr -./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps 
= $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: cat stderr -input: -input: - | - | +1 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1443: cat stderr - | (* *) + (*) + (*) -./calc.at:1451: $PREPARSER ./calc input -stderr: -stderr: -stderr: -./calc.at:1492: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -1.2: syntax error -1.10: syntax error -1.16: syntax error -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -1.2: syntax error -1.10: syntax error -1.16: syntax error - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1443: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 Reading a token Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 90): @@ -166221,23 +169351,22 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: cat stderr -./calc.at:1446: cat stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1446: $PREPARSER ./calc /dev/null +2.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +572. calc.at:1509: stderr: +stderr: + | (# + 1) = 1111 +./calc.at:1468: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +stderr: +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -166379,721 +169508,128 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 - | 1 + 2 * 3 + !+ ++ -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stdout: -./calc.at:1443: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./types.at:139: ./check -input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -532. calc.at:1443: | 1 + 2 * 3 + !- ++ - ok -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: $PREPARSER ./calc input -stderr: -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: cat stderr -stderr: - -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1451: cat stderr -Starting parse -Entering state 0 -Stack now 0 +Stack now 0 4 12 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 +Error: popping nterm exp (1) Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of input () +Cleanup: popping nterm input () + skipped (calc.at:1509) +539. calc.at:1454: ok stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +530. calc.at:1440: ok +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 + + + +575. calc.at:1514: testing Calculator D %debug ... +./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +576. 
calc.at:1516: testing Calculator D parse.error=custom ... +577. calc.at:1517: testing Calculator D %locations parse.error=custom ... +./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +syntax error, unexpected '+' input: -./calc.at:1446: "$PERL" -pi -e 'use strict; +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167103,27 +169639,70 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./types.at:139: ./check - | (#) + (#) = 2222 -./calc.at:1451: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -568. calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1446: cat stderr -./calc.at:1492: mv calc.y.tmp calc.y - +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: cat stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 = 2 = 3 + | (!!) + (1 2) = 1 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1441: cat stderr +574. calc.at:1512: stderr: +stderr: +stderr: +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error +./calc.at:1486: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: +stderr: 1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (!!) + (1 2) = 1 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: +573. 
calc.at:1510: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () Starting parse Entering state 0 Stack now 0 @@ -167264,7 +169843,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 ./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -167275,7 +169855,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +1.2: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + skipped (calc.at:1510) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1512) +syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -167416,35 +170018,24 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1446: "$PERL" -pi -e 'use strict; +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1455: cat stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1451: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: - | (1 + #) = 1111 -stdout: -./calc.at:1446: cat stderr -./types.at:139: $PREPARSER ./test -./calc.at:1451: $PREPARSER ./calc input -stderr: -input: -stderr: - | (- *) + (1 2) = 1 -1.6: syntax error: invalid character: '#' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' stderr: Starting parse Entering state 0 @@ -167455,129 +170046,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token number (1.11: 2) +Error: 
discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -167594,10 +170177,20 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.6: syntax error: invalid character: '#' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; +527. calc.at:1435: ok +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr + +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167607,158 +170200,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): 
- $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1451: cat stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +input: +stderr: +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167768,19 +170241,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1451: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -input: - | (* *) + (*) + (*) -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167790,6 +170251,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (1 + 1) / (1 - 1) stderr: Starting parse Entering state 0 @@ -167800,286 +170262,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 20 4 -Shifting token error (1.10: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -168096,11 +170393,13 @@ Stack now 0 6 16 Cleanup: 
popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + # + 1) = 1111 -./calc.at:1451: $PREPARSER ./calc input stderr: -./calc.at:1446: "$PERL" -pi -e 'use strict; +input: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168109,380 +170408,23 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1469: cat stderr ' expout || exit 77 -stderr: -stdout: -1.6: syntax error: invalid character: '#' -./types.at:139: ./check -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1441: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1438: cat stderr input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +./calc.at:1448: cat stderr +./calc.at:1476: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1451: cat stderr -input: -input: - | (1 + 1) / (1 - 1) - | 1 + 2 * 3 + !- ++ -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: $PREPARSER ./calc input -stderr: -stderr: -stdout: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.11-17: error: null divisor -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -======== Testing with C++ standard flags: '' -stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168492,277 +170434,48 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: "$PERL" -pi -e 'use strict; +2.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: cat stderr + +./calc.at:1480: cat stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; + | error +./calc.at:1449: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./calc.at:1446: cat stderr -./calc.at:1451: cat stderr -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | (#) + (#) = 2222 -./calc.at:1446: $PREPARSER ./calc input -537. calc.at:1451: ok -stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) 
-Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = 
nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1446: "$PERL" -pi -e 'use strict; +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168772,11 +170485,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: cat stderr input: - | (1 + #) = 1111 -./calc.at:1446: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc input stderr: +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: +575. calc.at:1514: 577. calc.at:1517: input: Starting parse Entering state 0 Stack now 0 @@ -168801,170 +170525,107 @@ Entering state 20 Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 Reading a token Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = 
token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stdout: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -Starting parse -Entering state 0 -Stack now 0 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 22 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 22 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 +Stack now 0 8 22 4 12 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 22 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 
1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -168981,10 +170642,10 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -569. calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1494: mv calc.y.tmp calc.y - -./calc.at:1446: "$PERL" -pi -e 'use strict; +./calc.at:1480: $PREPARSER ./calc /dev/null +./calc.at:1469: $PREPARSER ./calc input +2.1: syntax error +576. 
calc.at:1516: ./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168994,13 +170655,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1446: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1446: $PREPARSER ./calc input stderr: stdout: + | 1 + 2 * 3 + !+ ++ stderr: Starting parse Entering state 0 @@ -169017,25 +170674,34 @@ Entering state 11 Stack now 0 4 11 Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.2: ) Error: popping token error (1.1-2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err Error: discarding token '+' (1.4: ) Error: popping token error (1.1-2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-4: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token number (1.6: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err Error: discarding token number (1.6: 1) Error: popping token error (1.1-4: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-6: ) Entering state 11 Stack now 0 4 11 @@ -169044,7 +170710,7 @@ Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.1-6: ) $3 = token ')' (1.7: ) @@ -169061,14 +170727,16 @@ Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) @@ -169077,31 +170745,158 @@ Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: ./check -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | (- *) + (1 2) = 1 +./calc.at:1451: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +529. calc.at:1438: ./calc.at:1476: $PREPARSER ./calc input + ok + skipped (calc.at:1514) +578. calc.at:1518: testing Calculator D %locations parse.error=detailed ... +./calc.at:1480: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + + +./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y + skipped (calc.at:1516) + skipped (calc.at:1517) +./calc.at:1448: $PREPARSER ./calc input +stderr: +stderr: +syntax error, unexpected end of input +stderr: +./calc.at:1477: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () + + + + +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +./calc.at:1477: cat stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1478: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1446: cat stderr +stderr: +./calc.at:1443: cat stderr +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 + | (!!) + (1 2) = 1 + | (1 + # + 1) = 1111 +stdout: +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1476: cat stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +stderr: stderr: +input: +input: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1477: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Stack now 0 @@ -169117,25 +170912,34 @@ Entering state 11 Stack now 0 4 11 Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.2: ) Error: popping token error (1.1-2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err Error: discarding token '+' (1.4: ) Error: popping token error (1.1-2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-4: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token number (1.6: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err Error: discarding token number (1.6: 1) Error: popping token error (1.1-4: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-6: ) Entering state 11 Stack now 0 4 11 @@ -169144,7 +170948,7 @@ Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.1-6: ) $3 = token ')' (1.7: ) @@ -169161,14 +170965,16 @@ Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) @@ -169177,29 +170983,34 @@ Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token 
end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1446: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + | (- *) + (1 2) = 1 +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169209,266 +171020,130 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: cat stderr input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1443: $PREPARSER ./calc input stderr: - | (1 + # + 1) = 1111 -stdout: -./calc.at:1446: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +./calc.at:1480: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token 1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: 
) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: +syntax error, unexpected end of input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token number (1) +Error: discarding token number (1) +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number 
(1.15-18: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: cat stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +stderr: input: - | (1 + 1) / (1 - 1) -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1494: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS + | (1 + # + 1) = 1111 +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: Starting parse Entering state 0 @@ -169611,7 +171286,83 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1489: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +579. calc.at:1519: testing Calculator D %locations parse.error=simple ... +./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose ... +580. calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose ... 
+./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed ... +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom ... +stderr: +stderr: +./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error + | (1 + # + 1) = 1111 +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: cat stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' + | (# + 1) = 1111 stderr: Starting parse Entering state 0 @@ -169622,113 +171373,120 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 8 22 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 8 22 4 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 8 22 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering 
state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -169736,7 +171494,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -169754,7 +171512,29 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: "$PERL" -pi -e 'use strict; +stderr: + | (- *) + (1 2) = 1 +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1482: cat stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1448: $PREPARSER ./calc input +syntax error, unexpected number +error: 2222 != 1 +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169764,76 +171544,273 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: cat stderr -534. calc.at:1446: ok - stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -570. calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1494: mv calc.y.tmp calc.y - -./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1486: cat stderr +1.6: syntax error: invalid character: '#' stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1485: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 stderr: -./calc.at:1485: $PREPARSER ./calc input -stdout: -./calc.at:1477: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - +584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace ... +578. calc.at:1518: syntax error: invalid character: '#' +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1525: 
COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y stderr: Starting parse Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token number (1) -Shifting token number (1) +Error: discarding token number (1) +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -169841,11 +171818,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -169853,23 +171830,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next 
token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -169879,16 +171856,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -169897,21 +171874,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -169919,11 +171896,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -169935,29 +171912,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -169971,22 +171948,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' 
() $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -169995,12 +171972,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170009,11 +171986,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170023,11 +172000,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -170035,23 +172012,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -170064,22 +172041,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -170088,12 +172065,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170107,16 +172084,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 
-Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -170124,7 +172101,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -170135,16 +172112,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -170154,16 +172131,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -170171,13 +172148,15 @@ Entering state 8 Next token is token '\n' () Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Entering state 2./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1518) +4 +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170186,11 +172165,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170208,28 +172187,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm 
exp (1) -> $$ = nterm exp (-1) @@ -170242,22 +172221,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -170266,12 +172245,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170280,21 +172259,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -170302,16 +172281,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -170321,16 +172300,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -170344,22 +172323,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 
(line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -170368,22 +172347,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -170395,11 +172374,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -170407,16 +172386,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -170425,7 +172404,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -170433,7 +172412,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -170443,16 +172422,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -170461,12 +172440,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170475,21 +172454,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 
3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -170497,11 +172476,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -170509,24 +172488,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) -Entering sinput: -tate 32 +Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -170536,16 +172514,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -170554,12 +172532,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170569,11 +172547,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -170581,16 +172559,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 
32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -170599,7 +172577,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -170610,16 +172588,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -170629,16 +172607,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -170647,46 +172625,221 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.1: syntax error +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: cat stderr +input: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | +1 +./calc.at:1482: $PREPARSER ./calc input + | (1 + # + 1) = 1111 + | 1 2 +stderr: +stderr: +./calc.at:1441: cat stderr +./calc.at:1468: $PREPARSER ./calc input +stderr: +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1477: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) 
+Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -170694,11 +172847,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -170706,23 +172859,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -170732,16 +172885,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -170750,21 +172903,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () 
Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -170772,11 +172925,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -170788,29 +172941,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -170824,22 +172977,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -170848,12 +173001,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170862,11 +173015,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170876,11 +173029,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token 
"number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -170888,23 +173041,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -170917,22 +173070,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -170941,12 +173094,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -170960,16 +173113,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -170977,7 +173130,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -170988,16 +173141,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) 
@@ -171007,16 +173160,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -171025,12 +173178,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -171039,11 +173192,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -171061,28 +173214,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -171095,22 +173248,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -171119,12 +173272,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -171133,21 +173286,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by 
rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -171155,16 +173308,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -171174,16 +173327,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -171197,22 +173350,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -171221,22 +173374,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -171248,11 +173401,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing 
stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -171260,16 +173413,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -171278,7 +173431,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -171286,7 +173439,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -171296,16 +173449,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -171314,12 +173467,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -171328,21 +173481,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -171350,11 +173503,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -171362,23 +173515,27 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 
79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Next token./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + is token '=' () +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -171388,16 +173545,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -171406,12 +173563,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -171421,11 +173578,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -171433,16 +173590,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -171451,7 +173608,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -171462,16 +173619,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -171481,16 +173638,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (64) 
+Shifting token "number" (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -171499,68 +173656,23 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -input: - | 1 2 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: - | 1 2 -./calc.at:1477: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -stderr: -1.3: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171569,118 +173681,18 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg -' expout || exit 77 -stderr: -stderr: -stdout: -1.3: syntax error -./calc.at:1469: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1485: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +' expout || exit 77 ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -input: -./calc.at:1469: $PREPARSER ./calc input - | 1//2 -./calc.at:1485: $PREPARSER ./calc input -stderr: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1477: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -input: -stdout: - | 1//2 -stderr: - | 1 2 -./calc.at:1469: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./types.at:139: ./check -stderr: -./calc.at:1477: $PREPARSER ./calc input -syntax error -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error stderr: -syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error +579. calc.at:1519: syntax error: invalid character: '#' +./calc.at:1480: cat stderr +1.6: syntax error: invalid character: '#' ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -171690,178 +173702,180 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1478: $PREPARSER ./calc /dev/null +stderr: ' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: -./calc.at:1477: cat stderr - | error -./calc.at:1469: cat stderr -./calc.at:1485: $PREPARSER ./calc input -input: stderr: - | error +582. 
calc.at:1523: syntax error, unexpected number +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -input: -stderr: -Starting parse -Entering state 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -1.1: syntax error - | 1//2 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.1: syntax error -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Next token is token '+' () syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -./calc.at:1477: cat stderr -input: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1485: $PREPARSER ./calc input -input: - | 1 = 2 = 3 -stderr: -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1469: cat stderr +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -stderr: -input: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -Starting parse -Entering state 0 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = 
token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1469: $PREPARSER ./calc input -stderr: -stderr: -1.7: syntax error -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: ./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -171872,7 +173886,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171882,7 +173897,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1469: "$PERL" -pi -e 'use strict; + skipped (calc.at:1519) + + | 1 2 +581. calc.at:1521: stderr: + skipped (calc.at:1523) +./calc.at:1489: $PREPARSER ./calc input +1.1: syntax error +580. calc.at:1520: 531. 
calc.at:1441: ok +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171892,31 +173915,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: cat stderr -./calc.at:1469: cat stderr -./calc.at:1477: cat stderr + skipped (calc.at:1521) + skipped (calc.at:1520) + + +stderr: + + + input: input: - | - | +1 -./calc.at:1477: $PREPARSER ./calc input - | - | +1 input: - | 1 = 2 = 3 -./calc.at:1469: $PREPARSER ./calc input -stderr: -./calc.at:1485: $PREPARSER ./calc input -2.1: syntax error -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -stderr: -syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171926,7 +173940,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1469: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1476: cat stderr +stderr: +stderr: + | error +./calc.at:1486: $PREPARSER ./calc input +583. calc.at:1524: ./calc.at:1469: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + # + 1) = 1111 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1451: cat stderr +./calc.at:1455: $PREPARSER ./calc input +stderr: +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171936,77 +173990,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1477: $PREPARSER ./calc /dev/null -./calc.at:1469: cat stderr -stderr: -1.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1469: $PREPARSER ./calc input -stderr: -1.1: syntax error -stderr: Starting parse Entering state 0 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' 
stderr -stderr: -stderr: syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' Error: popping nterm input () Cleanup: discarding lookahead token '+' () -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -syntax error -./calc.at:1477: cat stderr -input: -./calc.at:1469: "$PERL" -pi -e 'use strict; +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -172016,99 +174020,262 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1477: $PREPARSER ./calc input stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1524) +1.1: syntax error stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1469: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; +585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union ... +syntax error, unexpected number +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +stdout: +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr +./calc.at:1448: cat stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1469: $PREPARSER ./calc /dev/null +./calc.at:1445: cat stderr +587. calc.at:1532: testing Calculator D api.push-pull=both ... +input: +./calc.at:1479: cat stderr +./calc.at:1482: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of 
file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y stderr: + | (!!) + (1 2) = 1 ./calc.at:1485: cat stderr -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: $PREPARSER ./calc /dev/null -stderr: -syntax error -./calc.at:1477: cat stderr +./calc.at:1469: $PREPARSER ./calc input + | (* *) + (*) + (*) +syntax error: invalid character: '#' +./calc.at:1451: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (!!) + (1 2) = 1 +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () Starting parse Entering state 0 Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1469: "$PERL" -pi -e 'use strict; +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +586. calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed ... +./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: +589. calc.at:1544: testing Calculator Java ... +./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1479: $PREPARSER ./calc input + +./calc.at:1477: cat stderr +./calc.at:1477: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) -stderr: ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1449: $PREPARSER ./calc input stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stderr: +590. calc.at:1545: testing Calculator Java parse.error=custom ... +588. 
calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1469: cat stderr -./calc.at:1485: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; +stderr: +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -172119,598 +174286,191 @@ }eg ' expout || exit 77 input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -input: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1477: cat stderr -./calc.at:1485: $PREPARSER ./calc input -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 +./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +584. calc.at:1525: stderr: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () +585. calc.at:1530: | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1482: $PREPARSER ./calc input + skipped (calc.at:1525) +1.11-17: error: null divisor +1.2: syntax error +1.10: syntax error +1.16: syntax error syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input -stderr: +587. 
calc.at:1532: input: +./calc.at:1478: cat stderr Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token '+' (1.8: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-8: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Stack now 0 4 11 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () +Next token is token number (1.10: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-10: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 4 11 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +Next token is token '\n' (1.19-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token end of file () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1480: cat stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr +./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + | 1 = 2 = 3 +./calc.at:1485: $PREPARSER ./calc input + skipped (calc.at:1532) stderr: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: + skipped (calc.at:1530) stderr: -./calc.at:1486: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1476: cat stderr + +591. calc.at:1546: testing Calculator Java parse.error=detailed ... + +./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1485: cat stderr stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -173547,13 +175307,101 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +592. 
calc.at:1547: testing Calculator Java parse.error=verbose ... +./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +593. calc.at:1548: testing Calculator Java %locations parse.error=custom ... +./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +594. calc.at:1549: testing Calculator Java %locations parse.error=detailed ... +./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +input: + | 1 2 + | (- *) + (1 2) = 1 +589. calc.at:1544: | (1 + 1) / (1 - 1) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + 1) / (1 - 1) +stderr: +586. calc.at:1531: stderr: +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +stderr: + | (1 + # + 1) = 1111 + | (#) + (#) = 2222 +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1479: $PREPARSER ./calc input +stderr: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () syntax error error: 2222 != 1 -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse +590. 
calc.at:1545: Starting parse Entering state 0 Reading a token Next token is token "number" (1) @@ -173898,11 +175746,7 @@ Entering state 8 Next token is token '\n' () Shifting token '\n' () -Entering state 2stderr: -syntax error -error: 2222 != 1 -input: -4 +Entering state 24 Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () @@ -174248,7 +176092,10 @@ -> $$ = nterm exp (3) Entering state 32 Reading a token -Next token is token '=' () +Next token1.2: syntax error +1.10: syntax error +1.16: syntax error + is token '=' () Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () @@ -174393,319 +176240,21 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | (!!) + (1 2) = 1 -input: -./calc.at:1485: $PREPARSER ./calc input - | 1 2 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1477: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading 
a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () - | (* *) + (*) + (*) ./calc.at:1477: $PREPARSER ./calc input +588. calc.at:1533: syntax error +error: 2222 != 1 + skipped (calc.at:1531) + skipped (calc.at:1544) +1.11-17: error: null divisor + skipped (calc.at:1533) + skipped (calc.at:1545) + + + + stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -./calc.at:1485: cat stderr stdout: -input: - | (- *) + (1 2) = 1 -./calc.at:1478: "$PERL" -ne ' +./calc.at:1485: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -174716,329 +176265,186 @@ || /\t/ )' calc.cc -./calc.at:1477: "$PERL" -pi -e 'use strict; +595. calc.at:1550: testing Calculator Java %locations parse.error=verbose ... +597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} ... +596. calc.at:1551: testing Calculator Java parse.trace parse.error=verbose ... +./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +598. calc.at:1554: testing Calculator Java api.push-pull=both ... +./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +input: +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1469: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -input: -stderr: -input: -./calc.at:1477: cat stderr -./calc.at:1485: $PREPARSER ./calc input +./calc.at:1446: cat stderr +./calc.at:1489: cat stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +593. calc.at:1548: 591. calc.at:1546: | (1 + # + 1) = 1111 +592. calc.at:1547: ./calc.at:1476: $PREPARSER ./calc input | 1//2 -syntax error -syntax error -error: 2222 != 1 -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 + | (* *) + (*) + (*) +./calc.at:1480: $PREPARSER ./calc input stderr: -./calc.at:1478: $PREPARSER ./calc input -input: -stdout: stderr: -./calc.at:1480: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - | 1 + 2 * 3 + !+ ++ - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1477: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input +stderr: stderr: stderr: + skipped (calc.at:1548) +stderr: +1.11-17: error: null divisor +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 stderr: +1.11-17: error: null divisor +syntax error, unexpected number + skipped (calc.at:1546) + skipped (calc.at:1547) Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Stack now 0 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -syntax error -syntax error -error: 2222 != 1 -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' () +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () +Stack now 0 4 Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Stack now 0 4 11 Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Error: discarding token number (1) +Error: popping token error () +Stack now 0 4 Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Stack now 0 4 11 Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Stack now 0 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm 
exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 +Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file () +Next token is token end of input () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () Cleanup: popping nterm input () -stderr: +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -175052,99 +176458,21 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1485: $PREPARSER ./calc input + + + +600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both ... +599. calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations ... +./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both ... +./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y input: -./calc.at:1480: $PREPARSER ./calc input -stderr: - | 1 2 -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -input: -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1477: $PREPARSER ./calc input -1.3: syntax error ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -1.3: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -./calc.at:1469: cat stderr - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 2 -stderr: -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1486: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -stderr: -input: -syntax error, unexpected number -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -input: -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175154,293 +176482,224 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (* *) + (*) + (*) -./calc.at:1485: $PREPARSER ./calc input - | error -./calc.at:1486: $PREPARSER ./calc input -stderr: -syntax error, unexpected number +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1478: cat stderr + | 1 2 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +595. calc.at:1550: ./calc.at:1482: $PREPARSER ./calc input +594. calc.at:1549: stderr: stderr: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +syntax error: invalid character: '#' +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: +error: 2222 != 1 + skipped (calc.at:1549) stderr: - | 1//2 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1477: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () + skipped (calc.at:1550) Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -syntax error -syntax error -syntax error -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + + +602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full ... +603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1480: cat stderr -1.3: syntax error -input: -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175450,106 +176709,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1477: $PREPARSER ./calc input -input: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1455: cat stderr | 1//2 +596. calc.at:1551: 598. calc.at:1554: | (* *) + (*) + (*) +597. calc.at:1552: ./calc.at:1489: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input stderr: -./calc.at:1486: cat stderr -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1485: cat stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stdout: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr -./calc.at:1482: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1485: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ - | 1 = 2 = 3 -./calc.at:1486: $PREPARSER ./calc input -input: -./calc.at:1478: cat stderr -./calc.at:1469: $PREPARSER ./calc input stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1482: $PREPARSER ./calc input stderr: +1.11-17: error: null divisor +./calc.at:1482: $PREPARSER ./calc /dev/null +syntax error, unexpected number +stderr: + skipped (calc.at:1551) +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 + skipped (calc.at:1554) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + skipped (calc.at:1552) +1.11-17: error: null divisor Starting parse Entering state 0 Reading a token @@ -175561,146 +176758,139 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () +syntax error Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () +Cleanup: discarding lookahead token "number" (2) Starting parse Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 +Error: popping nterm exp (1) +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 5 +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | error -stderr: -./calc.at:1478: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token number (1) +Error: discarding token number (1) +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 
(line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -175708,11 +176898,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -175720,23 +176910,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -175746,16 +176936,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -175764,21 +176954,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -175786,11 +176976,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -175802,29 +176992,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering 
state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -175838,22 +177028,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -175862,12 +177052,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -175876,11 +177066,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -175890,11 +177080,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -175902,23 +177092,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -175931,22 +177121,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 
(line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -175955,12 +177145,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -175974,16 +177164,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -175991,7 +177181,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -176002,16 +177192,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -176021,16 +177211,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -176039,12 +177229,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -176053,11 +177243,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -176075,28 +177265,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next 
token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -176109,22 +177299,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -176133,12 +177323,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -176147,21 +177337,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -176169,16 +177359,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -176188,16 +177378,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by 
rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -176211,22 +177401,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -176235,22 +177425,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -176262,11 +177452,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -176274,16 +177464,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -176292,7 +177482,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -176300,7 +177490,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -176310,16 +177500,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ 
= nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -176328,12 +177518,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -176342,21 +177532,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -176364,11 +177554,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -176376,23 +177566,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -176402,16 +177592,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -176420,12 +177610,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -176435,11 +177625,11 @@ Shifting token '(' () 
Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -176447,16 +177637,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -176465,7 +177655,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -176476,16 +177666,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -176495,16 +177685,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -176513,28 +177703,76 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + + +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1468: cat stderr +./calc.at:1480: cat stderr +599. 
calc.at:1555: ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +600. calc.at:1556: 601. calc.at:1557: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: stderr: -input: stderr: +stderr: +stderr: +./calc.at:1478: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () + skipped (calc.at:1555) Starting parse Entering state 0 Reading a token @@ -176570,83 +177808,15 @@ -> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -176656,16 +177826,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -176674,21 +177844,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -176696,11 +177866,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -176712,29 +177882,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () 
-Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -176748,22 +177918,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -176772,12 +177942,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -176786,11 +177956,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -176800,11 +177970,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -176812,23 +177982,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -176841,22 +178011,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 
(line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -176865,12 +178035,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -176884,16 +178054,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -176901,7 +178071,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -176912,16 +178082,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -176931,16 +178101,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -176949,12 +178119,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -176963,11 +178133,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ 
-176985,28 +178155,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -177019,22 +178189,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -177043,12 +178213,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -177057,21 +178227,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -177079,16 +178249,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -177098,16 +178268,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token 
is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -177121,22 +178291,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -177145,22 +178315,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -177172,11 +178342,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -177184,16 +178354,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -177202,7 +178372,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -177210,7 +178380,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -177220,16 +178390,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) 
Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -177238,12 +178408,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -177252,21 +178422,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -177274,11 +178444,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -177286,23 +178456,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -177312,16 +178482,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -177330,12 +178500,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): 
+Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -177345,11 +178515,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -177357,16 +178527,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -177375,7 +178545,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -177386,16 +178556,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -177405,16 +178575,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -177423,1578 +178593,1539 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1480: cat stderr -./calc.at:1469: $PREPARSER ./calc input -input: -input: -input: - | (1 + #) = 1111 -stderr: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1485: $PREPARSER ./calc input -1.1: syntax error - | 1 2 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1480: $PREPARSER ./calc input -stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1479: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -./calc.at:1486: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -1.6: syntax error: invalid character: '#' -syntax error, unexpected invalid token -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected invalid token -input: -stderr: -./calc.at:1478: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: -./calc.at:1486: $PREPARSER ./calc input - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -input: - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: cat stderr - | 1 = 2 = 3 -stderr: -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1485: cat stderr -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr - | 1 2 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -stderr: - | (#) + (#) = 2222 -input: -1.7: syntax error -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number - | (#) + (#) = 2222 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1480: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -stderr: -1.7: syntax error - | (# + 1) = 1111 -stderr: -./calc.at:1477: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -syntax error, unexpected number -input: -stderr: - | 1 = 2 = 3 -1.2: syntax error: invalid character: '#' -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: - | 1//2 -stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '=' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -stderr: -stderr: -./calc.at:1486: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -syntax error, unexpected '=' -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: $PREPARSER ./calc /dev/null -./calc.at:1485: cat stderr -stderr: -stderr: -./calc.at:1479: cat stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -input: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: - | 1//2 -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () -./calc.at:1479: $PREPARSER ./calc input -stderr: -input: -2.1: syntax error -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -stderr: -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr -stderr: -./calc.at:1480: cat stderr -2.1: syntax error -stderr: -input: +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '/', expecting number or '-' or '(' or '!' - | (1 + #) = 1111 -./calc.at:1469: $PREPARSER ./calc input + skipped (calc.at:1556) + skipped (calc.at:1557) Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1480: $PREPARSER ./calc input -stderr: -input: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' - | (1 + # + 1) = 1111 -./calc.at:1482: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1477: $PREPARSER ./calc input -syntax error, unexpected '+' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: cat stderr -stderr: -stderr: -syntax error, unexpected '+' -syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -stderr: -stdout: -./types.at:139: $PREPARSER ./test -input: -./calc.at:1478: cat stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1479: cat stderr - | error -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1486: $PREPARSER ./calc input -stderr: -./calc.at:1478: $PREPARSER ./calc /dev/null -1.6: syntax error: invalid character: '#' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Stack now 0 4 11 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -438. types.at:139: ok -./calc.at:1485: cat stderr -stderr: -stderr: +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + + + +605. 
torture.at:216: testing Big horizontal ... +604. torture.at:132: testing Big triangle ... +./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77 +./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77 +606. torture.at:270: testing State number type: 128 states ... +./torture.at:270: ruby $abs_top_srcdir/tests/linear 128 >input.y || exit 77 +--- /dev/null 2026-10-11 17:46:22.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr 2026-10-12 20:01:50.856870317 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found input: - | error -./calc.at:1479: $PREPARSER ./calc input +./calc.at:1486: cat stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +606. torture.at:270: | (1 + 1) / (1 - 1) +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected invalid token +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: - -./calc.at:1476: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - + skipped (torture.at:270) Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 Reading a token Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1469: "$PERL" -pi -e 'use strict; +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + +stdout: +%code top { /* -*- c -*- */ +/* Adjust to the compiler. + We used to do it here, but each time we add a new line, + we have to adjust all the line numbers in error messages. + It's simpler to use a constant include to a varying file. 
*/ +#include +} + +%define parse.error verbose +%debug +%{ +#include +#include +#define MAX 1000 +static int yylex (void); +#include + +/* !POSIX */ static void yyerror (const char *msg); +%} + +%token + t1 1 "1" + t2 2 "2" + t3 3 "3" + t4 4 "4" + t5 5 "5" + t6 6 "6" + t7 7 "7" + t8 8 "8" + t9 9 "9" + t10 10 "10" + t11 11 "11" + t12 12 "12" + t13 13 "13" + t14 14 "14" + t15 15 "15" + t16 16 "16" + t17 17 "17" + t18 18 "18" + t19 19 "19" + t20 20 "20" + t21 21 "21" + t22 22 "22" + t23 23 "23" + t24 24 "24" + t25 25 "25" + t26 26 "26" + t27 27 "27" + t28 28 "28" + t29 29 "29" + t30 30 "30" + t31 31 "31" + t32 32 "32" + t33 33 "33" + t34 34 "34" + t35 35 "35" + t36 36 "36" + t37 37 "37" + t38 38 "38" + t39 39 "39" + t40 40 "40" + t41 41 "41" + t42 42 "42" + t43 43 "43" + t44 44 "44" + t45 45 "45" + t46 46 "46" + t47 47 "47" + t48 48 "48" + t49 49 "49" + t50 50 "50" + t51 51 "51" + t52 52 "52" + t53 53 "53" + t54 54 "54" + t55 55 "55" + t56 56 "56" + t57 57 "57" + t58 58 "58" + t59 59 "59" + t60 60 "60" + t61 61 "61" + t62 62 "62" + t63 63 "63" + t64 64 "64" + t65 65 "65" + t66 66 "66" + t67 67 "67" + t68 68 "68" + t69 69 "69" + t70 70 "70" + t71 71 "71" + t72 72 "72" + t73 73 "73" + t74 74 "74" + t75 75 "75" + t76 76 "76" + t77 77 "77" + t78 78 "78" + t79 79 "79" + t80 80 "80" + t81 81 "81" + t82 82 "82" + t83 83 "83" + t84 84 "84" + t85 85 "85" + t86 86 "86" + t87 87 "87" + t88 88 "88" + t89 89 "89" + t90 90 "90" + t91 91 "91" + t92 92 "92" + t93 93 "93" + t94 94 "94" + t95 95 "95" + t96 96 "96" + t97 97 "97" + t98 98 "98" + t99 99 "99" + t100 100 "100" + t101 101 "101" + t102 102 "102" + t103 103 "103" + t104 104 "104" + t105 105 "105" + t106 106 "106" + t107 107 "107" + t108 108 "108" + t109 109 "109" + t110 110 "110" + t111 111 "111" + t112 112 "112" + t113 113 "113" + t114 114 "114" + t115 115 "115" + t116 116 "116" + t117 117 "117" + t118 118 "118" + t119 119 "119" + t120 120 "120" + t121 121 "121" + t122 122 "122" + t123 123 "123" + t124 124 "124" + t125 125 "125" + t126 126 "126" + t127 127 "127" + t128 128 "128" + t129 129 "129" + t130 130 "130" + t131 131 "131" + t132 132 "132" + t133 133 "133" + t134 134 "134" + t135 135 "135" + t136 136 "136" + t137 137 "137" + t138 138 "138" + t139 139 "139" + t140 140 "140" + t141 141 "141" + t142 142 "142" + t143 143 "143" + t144 144 "144" + t145 145 "145" + t146 146 "146" + t147 147 "147" + t148 148 "148" + t149 149 "149" + t150 150 "150" + t151 151 "151" + t152 152 "152" + t153 153 "153" + t154 154 "154" + t155 155 "155" + t156 156 "156" + t157 157 "157" + t158 158 "158" + t159 159 "159" + t160 160 "160" + t161 161 "161" + t162 162 "162" + t163 163 "163" + t164 164 "164" + t165 165 "165" + t166 166 "166" + t167 167 "167" + t168 168 "168" + t169 169 "169" + t170 170 "170" + t171 171 "171" + t172 172 "172" + t173 173 "173" + t174 174 "174" + t175 175 "175" + t176 176 "176" + t177 177 "177" + t178 178 "178" + t179 179 "179" + t180 180 "180" + t181 181 "181" + t182 182 "182" + t183 183 "183" + t184 184 "184" + t185 185 "185" + t186 186 "186" + t187 187 "187" + t188 188 "188" + t189 189 "189" + t190 190 "190" + t191 191 "191" + t192 192 "192" + t193 193 "193" + t194 194 "194" + t195 195 "195" + t196 196 "196" + t197 197 "197" + t198 198 "198" + t199 199 "199" + t200 200 "200" + t201 201 "201" + t202 202 "202" + t203 203 "203" + t204 204 "204" + t205 205 "205" + t206 206 "206" + t207 207 "207" + t208 208 "208" + t209 209 "209" + t210 210 "210" + t211 211 "211" + t212 212 "212" + t213 213 "213" + t214 214 "214" + t215 215 "215" + t216 216 "216" + 
t217 217 "217" + t218 218 "218" + t219 219 "219" + t220 220 "220" + t221 221 "221" + t222 222 "222" + t223 223 "223" + t224 224 "224" + t225 225 "225" + t226 226 "226" + t227 227 "227" + t228 228 "228" + t229 229 "229" + t230 230 "230" + t231 231 "231" + t232 232 "232" + t233 233 "233" + t234 234 "234" + t235 235 "235" + t236 236 "236" + t237 237 "237" + t238 238 "238" + t239 239 "239" + t240 240 "240" + t241 241 "241" + t242 242 "242" + t243 243 "243" + t244 244 "244" + t245 245 "245" + t246 246 "246" + t247 247 "247" + t248 248 "248" + t249 249 "249" + t250 250 "250" + t251 251 "251" + t252 252 "252" + t253 253 "253" + t254 254 "254" + t255 255 "255" + t256 256 "256" + t257 257 "257" + t258 258 "258" + t259 259 "259" + t260 260 "260" + t261 261 "261" + t262 262 "262" + t263 263 "263" + t264 264 "264" + t265 265 "265" + t266 266 "266" + t267 267 "267" + t268 268 "268" + t269 269 "269" + t270 270 "270" + t271 271 "271" + t272 272 "272" + t273 273 "273" + t274 274 "274" + t275 275 "275" + t276 276 "276" + t277 277 "277" + t278 278 "278" + t279 279 "279" + t280 280 "280" + t281 281 "281" + t282 282 "282" + t283 283 "283" + t284 284 "284" + t285 285 "285" + t286 286 "286" + t287 287 "287" + t288 288 "288" + t289 289 "289" + t290 290 "290" + t291 291 "291" + t292 292 "292" + t293 293 "293" + t294 294 "294" + t295 295 "295" + t296 296 "296" + t297 297 "297" + t298 298 "298" + t299 299 "299" + t300 300 "300" + t301 301 "301" + t302 302 "302" + t303 303 "303" + t304 304 "304" + t305 305 "305" + t306 306 "306" + t307 307 "307" + t308 308 "308" + t309 309 "309" + t310 310 "310" + t311 311 "311" + t312 312 "312" + t313 313 "313" + t314 314 "314" + t315 315 "315" + t316 316 "316" + t317 317 "317" + t318 318 "318" + t319 319 "319" + t320 320 "320" + t321 321 "321" + t322 322 "322" + t323 323 "323" + t324 324 "324" + t325 325 "325" + t326 326 "326" + t327 327 "327" + t328 328 "328" + t329 329 "329" + t330 330 "330" + t331 331 "331" + t332 332 "332" + t333 333 "333" + t334 334 "334" + t335 335 "335" + t336 336 "336" + t337 337 "337" + t338 338 "338" + t339 339 "339" + t340 340 "340" + t341 341 "341" + t342 342 "342" + t343 343 "343" + t344 344 "344" + t345 345 "345" + t346 346 "346" + t347 347 "347" + t348 348 "348" + t349 349 "349" + t350 350 "350" + t351 351 "351" + t352 352 "352" + t353 353 "353" + t354 354 "354" + t355 355 "355" + t356 356 "356" + t357 357 "357" + t358 358 "358" + t359 359 "359" + t360 360 "360" + t361 361 "361" + t362 362 "362" + t363 363 "363" + t364 364 "364" + t365 365 "365" + t366 366 "366" + t367 367 "367" + t368 368 "368" + t369 369 "369" + t370 370 "370" + t371 371 "371" + t372 372 "372" + t373 373 "373" + t374 374 "374" + t375 375 "375" + t376 376 "376" + t377 377 "377" + t378 378 "378" + t379 379 "379" + t380 380 "380" + t381 381 "381" + t382 382 "382" + t383 383 "383" + t384 384 "384" + t385 385 "385" + t386 386 "386" + t387 387 "387" + t388 388 "388" + t389 389 "389" + t390 390 "390" + t391 391 "391" + t392 392 "392" + t393 393 "393" + t394 394 "394" + t395 395 "395" + t396 396 "396" + t397 397 "397" + t398 398 "398" + t399 399 "399" + t400 400 "400" + t401 401 "401" + t402 402 "402" + t403 403 "403" + t404 404 "404" + t405 405 "405" + t406 406 "406" + t407 407 "407" + t408 408 "408" + t409 409 "409" + t410 410 "410" + t411 411 "411" + t412 412 "412" + t413 413 "413" + t414 414 "414" + t415 415 "415" + t416 416 "416" + t417 417 "417" + t418 418 "418" + t419 419 "419" + t420 420 "420" + t421 421 "421" + t422 422 "422" + t423 423 "423" + t424 424 "424" + t425 425 "425" + 
t426 426 "426" + t427 427 "427" + t428 428 "428" + t429 429 "429" + t430 430 "430" + t431 431 "431" + t432 432 "432" + t433 433 "433" + t434 434 "434" + t435 435 "435" + t436 436 "436" + t437 437 "437" + t438 438 "438" + t439 439 "439" + t440 440 "440" + t441 441 "441" + t442 442 "442" + t443 443 "443" + t444 444 "444" + t445 445 "445" + t446 446 "446" + t447 447 "447" + t448 448 "448" + t449 449 "449" + t450 450 "450" + t451 451 "451" + t452 452 "452" + t453 453 "453" + t454 454 "454" + t455 455 "455" + t456 456 "456" + t457 457 "457" + t458 458 "458" + t459 459 "459" + t460 460 "460" + t461 461 "461" + t462 462 "462" + t463 463 "463" + t464 464 "464" + t465 465 "465" + t466 466 "466" + t467 467 "467" + t468 468 "468" + t469 469 "469" + t470 470 "470" + t471 471 "471" + t472 472 "472" + t473 473 "473" + t474 474 "474" + t475 475 "475" + t476 476 "476" + t477 477 "477" + t478 478 "478" + t479 479 "479" + t480 480 "480" + t481 481 "481" + t482 482 "482" + t483 483 "483" + t484 484 "484" + t485 485 "485" + t486 486 "486" + t487 487 "487" + t488 488 "488" + t489 489 "489" + t490 490 "490" + t491 491 "491" + t492 492 "492" + t493 493 "493" + t494 494 "494" + t495 495 "495" + t496 496 "496" + t497 497 "497" + t498 498 "498" + t499 499 "499" + t500 500 "500" + t501 501 "501" + t502 502 "502" + t503 503 "503" + t504 504 "504" + t505 505 "505" + t506 506 "506" + t507 507 "507" + t508 508 "508" + t509 509 "509" + t510 510 "510" + t511 511 "511" + t512 512 "512" + t513 513 "513" + t514 514 "514" + t515 515 "515" + t516 516 "516" + t517 517 "517" + t518 518 "518" + t519 519 "519" + t520 520 "520" + t521 521 "521" + t522 522 "522" + t523 523 "523" + t524 524 "524" + t525 525 "525" + t526 526 "526" + t527 527 "527" + t528 528 "528" + t529 529 "529" + t530 530 "530" + t531 531 "531" + t532 532 "532" + t533 533 "533" + t534 534 "534" + t535 535 "535" + t536 536 "536" + t537 537 "537" + t538 538 "538" + t539 539 "539" + t540 540 "540" + t541 541 "541" + t542 542 "542" + t543 543 "543" + t544 544 "544" + t545 545 "545" + t546 546 "546" + t547 547 "547" + t548 548 "548" + t549 549 "549" + t550 550 "550" + t551 551 "551" + t552 552 "552" + t553 553 "553" + t554 554 "554" + t555 555 "555" + t556 556 "556" + t557 557 "557" + t558 558 "558" + t559 559 "559" + t560 560 "560" + t561 561 "561" + t562 562 "562" + t563 563 "563" + t564 564 "564" + t565 565 "565" + t566 566 "566" + t567 567 "567" + t568 568 "568" + t569 569 "569" + t570 570 "570" + t571 571 "571" + t572 572 "572" + t573 573 "573" + t574 574 "574" + t575 575 "575" + t576 576 "576" + t577 577 "577" + t578 578 "578" + t579 579 "579" + t580 580 "580" + t581 581 "581" + t582 582 "582" + t583 583 "583" + t584 584 "584" + t585 585 "585" + t586 586 "586" + t587 587 "587" + t588 588 "588" + t589 589 "589" + t590 590 "590" + t591 591 "591" + t592 592 "592" + t593 593 "593" + t594 594 "594" + t595 595 "595" + t596 596 "596" + t597 597 "597" + t598 598 "598" + t599 599 "599" + t600 600 "600" + t601 601 "601" + t602 602 "602" + t603 603 "603" + t604 604 "604" + t605 605 "605" + t606 606 "606" + t607 607 "607" + t608 608 "608" + t609 609 "609" + t610 610 "610" + t611 611 "611" + t612 612 "612" + t613 613 "613" + t614 614 "614" + t615 615 "615" + t616 616 "616" + t617 617 "617" + t618 618 "618" + t619 619 "619" + t620 620 "620" + t621 621 "621" + t622 622 "622" + t623 623 "623" + t624 624 "624" + t625 625 "625" + t626 626 "626" + t627 627 "627" + t628 628 "628" + t629 629 "629" + t630 630 "630" + t631 631 "631" + t632 632 "632" + t633 633 "633" + t634 634 "634" + 
t635 635 "635" + t636 636 "636" + t637 637 "637" + t638 638 "638" + t639 639 "639" + t640 640 "640" + t641 641 "641" + t642 642 "642" + t643 643 "643" + t644 644 "644" + t645 645 "645" + t646 646 "646" + t647 647 "647" + t648 648 "648" + t649 649 "649" + t650 650 "650" + t651 651 "651" + t652 652 "652" + t653 653 "653" + t654 654 "654" + t655 655 "655" + t656 656 "656" + t657 657 "657" + t658 658 "658" + t659 659 "659" + t660 660 "660" + t661 661 "661" + t662 662 "662" + t663 663 "663" + t664 664 "664" + t665 665 "665" + t666 666 "666" + t667 667 "667" + t668 668 "668" + t669 669 "669" + t670 670 "670" + t671 671 "671" + t672 672 "672" + t673 673 "673" + t674 674 "674" + t675 675 "675" + t676 676 "676" + t677 677 "677" + t678 678 "678" + t679 679 "679" + t680 680 "680" + t681 681 "681" + t682 682 "682" + t683 683 "683" + t684 684 "684" + t685 685 "685" + t686 686 "686" + t687 687 "687" + t688 688 "688" + t689 689 "689" + t690 690 "690" + t691 691 "691" + t692 692 "692" + t693 693 "693" + t694 694 "694" + t695 695 "695" + t696 696 "696" + t697 697 "697" + t698 698 "698" + t699 699 "699" + t700 700 "700" + t701 701 "701" + t702 702 "702" + t703 703 "703" + t704 704 "704" + t705 705 "705" + t706 706 "706" + t707 707 "707" + t708 708 "708" + t709 709 "709" + t710 710 "710" + t711 711 "711" + t712 712 "712" + t713 713 "713" + t714 714 "714" + t715 715 "715" + t716 716 "716" + t717 717 "717" + t718 718 "718" + t719 719 "719" + t720 720 "720" + t721 721 "721" + t722 722 "722" + t723 723 "723" + t724 724 "724" + t725 725 "725" + t726 726 "726" + t727 727 "727" + t728 728 "728" + t729 729 "729" + t730 730 "730" + t731 731 "731" + t732 732 "732" + t733 733 "733" + t734 734 "734" + t735 735 "735" + t736 736 "736" + t737 737 "737" + t738 738 "738" + t739 739 "739" + t740 740 "740" + t741 741 "741" + t742 742 "742" + t743 743 "743" + t744 744 "744" + t745 745 "745" + t746 746 "746" + t747 747 "747" + t748 748 "748" + t749 749 "749" + t750 750 "750" + t751 751 "751" + t752 752 "752" + t753 753 "753" + t754 754 "754" + t755 755 "755" + t756 756 "756" + t757 757 "757" + t758 758 "758" + t759 759 "759" + t760 760 "760" + t761 761 "761" + t762 762 "762" + t763 763 "763" + t764 764 "764" + t765 765 "765" + t766 766 "766" + t767 767 "767" + t768 768 "768" + t769 769 "769" + t770 770 "770" + t771 771 "771" + t772 772 "772" + t773 773 "773" + t774 774 "774" + t775 775 "775" + t776 776 "776" + t777 777 "777" + t778 778 "778" + t779 779 "779" + t780 780 "780" + t781 781 "781" + t782 782 "782" + t783 783 "783" + t784 784 "784" + t785 785 "785" + t786 786 "786" + t787 787 "787" + t788 788 "788" + t789 789 "789" + t790 790 "790" + t791 791 "791" + t792 792 "792" + t793 793 "793" + t794 794 "794" + t795 795 "795" + t796 796 "796" + t797 797 "797" + t798 798 "798" + t799 799 "799" + t800 800 "800" + t801 801 "801" + t802 802 "802" + t803 803 "803" + t804 804 "804" + t805 805 "805" + t806 806 "806" + t807 807 "807" + t808 808 "808" + t809 809 "809" + t810 810 "810" + t811 811 "811" + t812 812 "812" + t813 813 "813" + t814 814 "814" + t815 815 "815" + t816 816 "816" + t817 817 "817" + t818 818 "818" + t819 819 "819" + t820 820 "820" + t821 821 "821" + t822 822 "822" + t823 823 "823" + t824 824 "824" + t825 825 "825" + t826 826 "826" + t827 827 "827" + t828 828 "828" + t829 829 "829" + t830 830 "830" + t831 831 "831" + t832 832 "832" + t833 833 "833" + t834 834 "834" + t835 835 "835" + t836 836 "836" + t837 837 "837" + t838 838 "838" + t839 839 "839" + t840 840 "840" + t841 841 "841" + t842 842 "842" + t843 843 "843" + 
t844 844 "844" + t845 845 "845" + t846 846 "846" + t847 847 "847" + t848 848 "848" + t849 849 "849" + t850 850 "850" + t851 851 "851" + t852 852 "852" + t853 853 "853" + t854 854 "854" + t855 855 "855" + t856 856 "856" + t857 857 "857" + t858 858 "858" + t859 859 "859" + t860 860 "860" + t861 861 "861" + t862 862 "862" + t863 863 "863" + t864 864 "864" + t865 865 "865" + t866 866 "866" + t867 867 "867" + t868 868 "868" + t869 869 "869" + t870 870 "870" + t871 871 "871" + t872 872 "872" + t873 873 "873" + t874 874 "874" + t875 875 "875" + t876 876 "876" + t877 877 "877" + t878 878 "878" + t879 879 "879" + t880 880 "880" + t881 881 "881" + t882 882 "882" + t883 883 "883" + t884 884 "884" + t885 885 "885" + t886 886 "886" + t887 887 "887" + t888 888 "888" + t889 889 "889" + t890 890 "890" + t891 891 "891" + t892 892 "892" + t893 893 "893" + t894 894 "894" + t895 895 "895" + t896 896 "896" + t897 897 "897" + t898 898 "898" + t899 899 "899" + t900 900 "900" + t901 901 "901" + t902 902 "902" + t903 903 "903" + t904 904 "904" + t905 905 "905" + t906 906 "906" + t907 907 "907" + t908 908 "908" + t909 909 "909" + t910 910 "910" + t911 911 "911" + t912 912 "912" + t913 913 "913" + t914 914 "914" + t915 915 "915" + t916 916 "916" + t917 917 "917" + t918 918 "918" + t919 919 "919" + t920 920 "920" + t921 921 "921" + t922 922 "922" + t923 923 "923" + t924 924 "924" + t925 925 "925" + t926 926 "926" + t927 927 "927" + t928 928 "928" + t929 929 "929" + t930 930 "930" + t931 931 "931" + t932 932 "932" + t933 933 "933" + t934 934 "934" + t935 935 "935" + t936 936 "936" + t937 937 "937" + t938 938 "938" + t939 939 "939" + t940 940 "940" + t941 941 "941" + t942 942 "942" + t943 943 "943" + t944 944 "944" + t945 945 "945" + t946 946 "946" + t947 947 "947" + t948 948 "948" + t949 949 "949" + t950 950 "950" + t951 951 "951" + t952 952 "952" + t953 953 "953" + t954 954 "954" + t955 955 "955" + t956 956 "956" + t957 957 "957" + t958 958 "958" + t959 959 "959" + t960 960 "960" + t961 961 "961" + t962 962 "962" + t963 963 "963" + t964 964 "964" + t965 965 "965" + t966 966 "966" + t967 967 "967" + t968 968 "968" + t969 969 "969" + t970 970 "970" + t971 971 "971" + t972 972 "972" + t973 973 "973" + t974 974 "974" + t975 975 "975" + t976 976 "976" + t977 977 "977" + t978 978 "978" + t979 979 "979" + t980 980 "980" + t981 981 "981" + t982 982 "982" + t983 983 "983" + t984 984 "984" + t985 985 "985" + t986 986 "986" + t987 987 "987" + t988 988 "988" + t989 989 "989" + t990 990 "990" + t991 991 "991" + t992 992 "992" + t993 993 "993" + t994 994 "994" + t995 995 "995" + t996 996 "996" + t997 997 "997" + t998 998 "998" + t999 999 "999" + t1000 1000 "1000" + +%% +exp: "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" 
"160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" "200" "201" "202" "203" "204" "205" "206" "207" "208" + "209" "210" "211" "212" "213" "214" "215" "216" "217" "218" "219" "220" + "221" "222" "223" "224" "225" "226" "227" "228" "229" "230" "231" "232" + "233" "234" "235" "236" "237" "238" "239" "240" "241" "242" "243" "244" + "245" "246" "247" "248" "249" "250" "251" "252" "253" "254" "255" "256" + "257" "258" "259" "260" "261" "262" "263" "264" "265" "266" "267" "268" + "269" "270" "271" "272" "273" "274" "275" "276" "277" "278" "279" "280" + "281" "282" "283" "284" "285" "286" "287" "288" "289" "290" "291" "292" + "293" "294" "295" "296" "297" "298" "299" "300" "301" "302" "303" "304" + "305" "306" "307" "308" "309" "310" "311" "312" "313" "314" "315" "316" + "317" "318" "319" "320" "321" "322" "323" "324" "325" "326" "327" "328" + "329" "330" "331" "332" "333" "334" "335" "336" "337" "338" "339" "340" + "341" "342" "343" "344" "345" "346" "347" "348" "349" "350" "351" "352" + "353" "354" "355" "356" "357" "358" "359" "360" "361" "362" "363" "364" + "365" "366" "367" "368" "369" "370" "371" "372" "373" "374" "375" "376" + "377" "378" "379" "380" "381" "382" "383" "384" "385" "386" "387" "388" + "389" "390" "391" "392" "393" "394" "395" "396" "397" "398" "399" "400" + "401" "402" "403" "404" "405" "406" "407" "408" "409" "410" "411" "412" + "413" "414" "415" "416" "417" "418" "419" "420" "421" "422" "423" "424" + "425" "426" "427" "428" "429" "430" "431" "432" "433" "434" "435" "436" + "437" "438" "439" "440" "441" "442" "443" "444" "445" "446" "447" "448" + "449" "450" "451" "452" "453" "454" "455" "456" "457" "458" "459" "460" + "461" "462" "463" "464" "465" "466" "467" "468" "469" "470" "471" "472" + "473" "474" "475" "476" "477" "478" "479" "480" "481" "482" "483" "484" + "485" "486" "487" "488" "489" "490" "491" "492" "493" "494" "495" "496" + "497" "498" "499" "500" "501" "502" "503" "504" "505" "506" "507" "508" + "509" "510" "511" "512" "513" "514" "515" "516" "517" "518" "519" "520" + "521" "522" "523" "524" "525" "526" "527" "528" "529" "530" "531" "532" + "533" "534" "535" "536" "537" "538" "539" "540" "541" "542" "543" "544" + "545" "546" "547" "548" "549" "550" "551" "552" "553" "554" "555" "556" + "557" "558" "559" "560" "561" "562" "563" "564" "565" "566" "567" "568" + "569" "570" "571" "572" "573" "574" "575" "576" "577" "578" "579" "580" + "581" "582" "583" "584" "585" "586" "587" "588" "589" "590" "591" "592" + "593" "594" "595" "596" "597" "598" "599" "600" "601" "602" "603" "604" + "605" "606" "607" "608" "609" "610" "611" "612" "613" "614" "615" "616" + "617" "618" "619" "620" "621" "622" "623" "624" "625" "626" "627" "628" + "629" "630" "631" "632" "633" "634" "635" "636" "637" "638" "639" "640" + "641" "642" "643" "644" "645" "646" "647" "648" "649" "650" "651" "652" + "653" "654" "655" "656" "657" "658" "659" "660" "661" "662" "663" "664" + "665" "666" "667" "668" "669" "670" "671" "672" "673" "674" "675" "676" + "677" "678" "679" "680" "681" "682" "683" "684" "685" "686" "687" "688" + "689" "690" "691" "692" "693" "694" "695" "696" "697" "698" "699" "700" + "701" "702" "703" "704" "705" "706" "707" "708" "709" "710" "711" "712" + "713" "714" "715" "716" "717" "718" "719" "720" "721" "722" "723" "724" + "725" "726" "727" "728" "729" "730" "731" "732" "733" "734" "735" 
"736" + "737" "738" "739" "740" "741" "742" "743" "744" "745" "746" "747" "748" + "749" "750" "751" "752" "753" "754" "755" "756" "757" "758" "759" "760" + "761" "762" "763" "764" "765" "766" "767" "768" "769" "770" "771" "772" + "773" "774" "775" "776" "777" "778" "779" "780" "781" "782" "783" "784" + "785" "786" "787" "788" "789" "790" "791" "792" "793" "794" "795" "796" + "797" "798" "799" "800" "801" "802" "803" "804" "805" "806" "807" "808" + "809" "810" "811" "812" "813" "814" "815" "816" "817" "818" "819" "820" + "821" "822" "823" "824" "825" "826" "827" "828" "829" "830" "831" "832" + "833" "834" "835" "836" "837" "838" "839" "840" "841" "842" "843" "844" + "845" "846" "847" "848" "849" "850" "851" "852" "853" "854" "855" "856" + "857" "858" "859" "860" "861" "862" "863" "864" "865" "866" "867" "868" + "869" "870" "871" "872" "873" "874" "875" "876" "877" "878" "879" "880" + "881" "882" "883" "884" "885" "886" "887" "888" "889" "890" "891" "892" + "893" "894" "895" "896" "897" "898" "899" "900" "901" "902" "903" "904" + "905" "906" "907" "908" "909" "910" "911" "912" "913" "914" "915" "916" + "917" "918" "919" "920" "921" "922" "923" "924" "925" "926" "927" "928" + "929" "930" "931" "932" "933" "934" "935" "936" "937" "938" "939" "940" + "941" "942" "943" "944" "945" "946" "947" "948" "949" "950" "951" "952" + "953" "954" "955" "956" "957" "958" "959" "960" "961" "962" "963" "964" + "965" "966" "967" "968" "969" "970" "971" "972" "973" "974" "975" "976" + "977" "978" "979" "980" "981" "982" "983" "984" "985" "986" "987" "988" + "989" "990" "991" "992" "993" "994" "995" "996" "997" "998" "999" "1000" + ; +%% +#include + + + + +/* A C error reporting function. */ +/* !POSIX */ static +void yyerror (const char *msg) +{ + fprintf (stderr, "%s\n", msg); +} +static int +yylex (void) +{ + static int counter = 1; + if (counter <= MAX) + return counter++; + assert (counter++ == MAX + 1); + return 0; +} +#include /* getenv. */ +#include /* strcmp. */ +int +main (int argc, char const* argv[]) +{ + (void) argc; + (void) argv; + return yyparse (); +} +607. torture.at:271: testing State number type: 129 states ... +./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77 +./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +608. torture.at:272: testing State number type: 256 states ... +./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77 +609. torture.at:273: testing State number type: 257 states ... 
+./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77 +--- /dev/null 2026-10-11 17:46:22.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr 2026-10-12 20:01:50.876870317 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found +--- /dev/null 2026-10-11 17:46:22.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr 2026-10-12 20:01:50.880870317 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found +--- /dev/null 2026-10-11 17:46:22.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr 2026-10-12 20:01:50.880870317 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found +610. torture.at:274: testing State number type: 32768 states ... +./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77 +--- /dev/null 2026-10-11 17:46:22.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr 2026-10-12 20:01:50.904870317 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179004,8 +180135,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1449: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179015,119 +180146,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input -stderr: -syntax error, unexpected invalid token -./calc.at:1477: cat stderr -stderr: -571. calc.at:1504: testing Calculator lalr1.d ... 
-Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1480: cat stderr -input: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1469: cat stderr -./calc.at:1476: $PREPARSER ./calc input -input: -./calc.at:1480: $PREPARSER ./calc /dev/null - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1478: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179137,84 +180162,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -./calc.at:1486: cat stderr -syntax error, unexpected end of input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179225,140 +180173,10 @@ }eg ' expout || exit 77 input: -1.11-17: error: null divisor -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (# + 1) = 1111 -./calc.at:1469: $PREPARSER ./calc input -syntax error, unexpected end of input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 2 input: -./calc.at:1476: $PREPARSER ./calc input - | (!!) 
+ (1 2) = 1 -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1482: cat stderr -stderr: -./calc.at:1478: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -syntax error: invalid character: '#' -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179368,132 +180186,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: cat stderr -stderr: -stderr: -stderr: -stderr: -syntax error -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179503,44 +180196,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -input: - | 1 = 2 = 3 -input: -./calc.at:1477: "$PERL" -pi -e 'use strict; +610. torture.at:274: ./calc.at:1451: cat stderr +608. torture.at:272: 609. torture.at:273: | (1 + 1) / (1 - 1) +603. calc.at:1561: 602. calc.at:1560: 607. torture.at:271: ./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1468: $PREPARSER ./calc input my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1479: $PREPARSER ./calc input -stderr: -./calc.at:1482: $PREPARSER ./calc input + | (!!) + (1 2) = 1 stderr: -syntax error -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 + | 1 2 +./calc.at:1485: $PREPARSER ./calc input stderr: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179550,12808 +180224,2357 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1480: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input -syntax error, unexpected '=' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1477: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '=' -./calc.at:1486: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1480: $PREPARSER ./calc input -stderr: -548. calc.at:1477: ok -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1469: cat stderr -stderr: -stderr: -571. calc.at:1504: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -input: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (calc.at:1504) - | (- *) + (1 2) = 1 -input: -./calc.at:1476: cat stderr -./calc.at:1486: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1478: cat stderr -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -stderr: +Cleanup: discarding lookahead token "end of input" () +stdout: + skipped (torture.at:271) +%code top { /* -*- c -*- */ +/* Adjust to the compiler. + We used to do it here, but each time we add a new line, + we have to adjust all the line numbers in error messages. + It's simpler to use a constant include to a varying file. */ +#include +} -syntax error: invalid character: '#' -input: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +%define parse.error verbose +%debug +%{ +#include +#include +#include +#define MAX 200 +static int yylex (void); +#include + +/* !POSIX */ static void yyerror (const char *msg); +%} +%union +{ + int val; +}; + +%token END "end" +%type exp input +%token t1 1 "1" +%token t2 2 "2" +%token t3 3 "3" +%token t4 4 "4" +%token t5 5 "5" +%token t6 6 "6" +%token t7 7 "7" +%token t8 8 "8" +%token t9 9 "9" +%token t10 10 "10" +%token t11 11 "11" +%token t12 12 "12" +%token t13 13 "13" +%token t14 14 "14" +%token t15 15 "15" +%token t16 16 "16" +%token t17 17 "17" +%token t18 18 "18" +%token t19 19 "19" +%token t20 20 "20" +%token t21 21 "21" +%token t22 22 "22" +%token t23 23 "23" +%token t24 24 "24" +%token t25 25 "25" +%token t26 26 "26" +%token t27 27 "27" +%token t28 28 "28" +%token t29 29 "29" +%token t30 30 "30" +%token t31 31 "31" +%token t32 32 "32" +%token t33 33 "33" +%token t34 34 "34" +%token t35 35 "35" +%token t36 36 "36" +%token t37 37 "37" +%token t38 38 "38" +%token t39 39 "39" +%token t40 40 "40" +%token t41 41 "41" +%token t42 42 "42" +%token t43 43 "43" +%token t44 44 "44" +%token t45 45 "45" +%token t46 46 "46" +%token t47 47 "47" +%token t48 48 "48" +%token t49 49 "49" +%token t50 50 "50" +%token t51 51 "51" +%token t52 52 "52" +%token t53 53 "53" +%token t54 54 "54" +%token t55 55 "55" +%token t56 56 "56" +%token t57 57 "57" +%token t58 58 "58" +%token t59 59 "59" +%token t60 60 "60" +%token t61 61 "61" +%token t62 62 "62" +%token t63 63 "63" +%token t64 64 "64" +%token t65 65 "65" +%token t66 66 "66" +%token t67 67 "67" +%token t68 68 "68" +%token t69 69 "69" +%token t70 70 "70" +%token t71 71 "71" +%token t72 72 "72" +%token t73 73 "73" +%token t74 74 "74" +%token t75 75 "75" 
+%token t76 76 "76" +%token t77 77 "77" +%token t78 78 "78" +%token t79 79 "79" +%token t80 80 "80" +%token t81 81 "81" +%token t82 82 "82" +%token t83 83 "83" +%token t84 84 "84" +%token t85 85 "85" +%token t86 86 "86" +%token t87 87 "87" +%token t88 88 "88" +%token t89 89 "89" +%token t90 90 "90" +%token t91 91 "91" +%token t92 92 "92" +%token t93 93 "93" +%token t94 94 "94" +%token t95 95 "95" +%token t96 96 "96" +%token t97 97 "97" +%token t98 98 "98" +%token t99 99 "99" +%token t100 100 "100" +%token t101 101 "101" +%token t102 102 "102" +%token t103 103 "103" +%token t104 104 "104" +%token t105 105 "105" +%token t106 106 "106" +%token t107 107 "107" +%token t108 108 "108" +%token t109 109 "109" +%token t110 110 "110" +%token t111 111 "111" +%token t112 112 "112" +%token t113 113 "113" +%token t114 114 "114" +%token t115 115 "115" +%token t116 116 "116" +%token t117 117 "117" +%token t118 118 "118" +%token t119 119 "119" +%token t120 120 "120" +%token t121 121 "121" +%token t122 122 "122" +%token t123 123 "123" +%token t124 124 "124" +%token t125 125 "125" +%token t126 126 "126" +%token t127 127 "127" +%token t128 128 "128" +%token t129 129 "129" +%token t130 130 "130" +%token t131 131 "131" +%token t132 132 "132" +%token t133 133 "133" +%token t134 134 "134" +%token t135 135 "135" +%token t136 136 "136" +%token t137 137 "137" +%token t138 138 "138" +%token t139 139 "139" +%token t140 140 "140" +%token t141 141 "141" +%token t142 142 "142" +%token t143 143 "143" +%token t144 144 "144" +%token t145 145 "145" +%token t146 146 "146" +%token t147 147 "147" +%token t148 148 "148" +%token t149 149 "149" +%token t150 150 "150" +%token t151 151 "151" +%token t152 152 "152" +%token t153 153 "153" +%token t154 154 "154" +%token t155 155 "155" +%token t156 156 "156" +%token t157 157 "157" +%token t158 158 "158" +%token t159 159 "159" +%token t160 160 "160" +%token t161 161 "161" +%token t162 162 "162" +%token t163 163 "163" +%token t164 164 "164" +%token t165 165 "165" +%token t166 166 "166" +%token t167 167 "167" +%token t168 168 "168" +%token t169 169 "169" +%token t170 170 "170" +%token t171 171 "171" +%token t172 172 "172" +%token t173 173 "173" +%token t174 174 "174" +%token t175 175 "175" +%token t176 176 "176" +%token t177 177 "177" +%token t178 178 "178" +%token t179 179 "179" +%token t180 180 "180" +%token t181 181 "181" +%token t182 182 "182" +%token t183 183 "183" +%token t184 184 "184" +%token t185 185 "185" +%token t186 186 "186" +%token t187 187 "187" +%token t188 188 "188" +%token t189 189 "189" +%token t190 190 "190" +%token t191 191 "191" +%token t192 192 "192" +%token t193 193 "193" +%token t194 194 "194" +%token t195 195 "195" +%token t196 196 "196" +%token t197 197 "197" +%token t198 198 "198" +%token t199 199 "199" +%token t200 200 "200" +%% input: + exp { assert ($1 == 0); $$ = $1; } +| input exp { assert ($2 == $1 + 1); $$ = $2; } +; -stderr: - | (!!) + (1 2) = 1 -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1//2 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -./calc.at:1476: $PREPARSER ./calc input -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: $PREPARSER ./calc input -stderr: -syntax error -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -1.11: syntax error -1.1-16: error: 2222 != 1 -syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse +exp: + END + { $$ = 0; } +| "1" END + { $$ = 1; } +| "1" "2" END + { $$ = 2; } +| "1" "2" "3" END + { $$ = 3; } +| "1" "2" "3" "4" END + { $$ = 4; } +| "1" "2" "3" "4" "5" END + { $$ = 5; } +| "1" "2" "3" "4" "5" "6" END + { $$ = 6; } +| "1" "2" "3" "4" "5" "6" "7" END + { $$ = 7; } +| "1" "2" "3" "4" "5" "6" "7" "8" END + { $$ = 8; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" END + { $$ = 9; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" END + { $$ = 10; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" END + { $$ = 11; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" END + { $$ = 12; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" END + { $$ = 13; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" END + { $$ = 14; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" END + { $$ = 15; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + END + { $$ = 16; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" END + { $$ = 17; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" END + { $$ = 18; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" END + { $$ = 19; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" END + { $$ = 20; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" END + { $$ = 21; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" END + { $$ = 22; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" END + { $$ = 23; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" END + { $$ = 24; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" END + { $$ = 25; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" END + { $$ = 26; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" END + { $$ = 27; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" END + { $$ = 28; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" END + { $$ = 29; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + END + { $$ = 30; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + 
"31" END + { $$ = 31; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" END + { $$ = 32; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" END + { $$ = 33; } +| "1" "2" "3" "4" "5" skipped (torture.at:272) + "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" END + { $$ = 34; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" END + { $$ = 35; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" END + { $$ = 36; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" END + { $$ = 37; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" END + { $$ = 38; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" END + { $$ = 39; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" END + { $$ = 40; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" END + { $$ = 41; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" END + { $$ = 42; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" END + { $$ = 43; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + END + { $$ = 44; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" END + { $$ = 45; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" END + { $$ = 46; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" END + { $$ = 47; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" 
"15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" END + { $$ = 48; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" END + { $$ = 49; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" END + { $$ = 50; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" END + { $$ = 51; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" END + { $$ = 52; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" END + { $$ = 53; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" END + { $$ = 54; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" END + { $$ = 55; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" END + { $$ = 56; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" END + { $$ = 57; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + END + { $$ = 58; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" END + { $$ = 59; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" 
"36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" END + { $$ = 60; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" END + { $$ = 61; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" END + { $$ = 62; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" END + { $$ = 63; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" END + { $$ = 64; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" END + { $$ = 65; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" END + { $$ = 66; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" END + { $$ = 67; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" END + { $$ = 68; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" END + { $$ = 69; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + 
"59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" END + { $$ = 70; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" END + { $$ = 71; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + END + { $$ = 72; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" END + { $$ = 73; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" END + { $$ = 74; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" END + { $$ = 75; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" END + { $$ = 76; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" END + { $$ = 77; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" END + { $$ = 78; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" 
"63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" END + { $$ = 79; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" END + { $$ = 80; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" END + { $$ = 81; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" END + { $$ = 82; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" END + { $$ = 83; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" END + { $$ = 84; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" END + { $$ = 85; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + END + { $$ = 86; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" 
"76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" END + { $$ = 87; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" END + { $$ = 88; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" END + { $$ = 89; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" END + { $$ = 90; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" END + { $$ = 91; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" END + { $$ = 92; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" END + { $$ = 93; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" END + { $$ = 94; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" 
"24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" END + { $$ = 95; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" END + { $$ = 96; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" END + { $$ = 97; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" END + { $$ = 98; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" END + { $$ = 99; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + END + { $$ = 100; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" skipped (torture.at:273) + "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" END + { $$ = 101; } +| "1" "2" "3" "4" "5" 
"6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" END + { $$ = 102; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" END + { $$ = 103; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" END + { $$ = 104; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" END + { $$ = 105; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" END + { $$ = 106; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" END + { $$ = 107; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" 
"49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" END + { $$ = 108; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" END + { $$ = 109; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" END + { $$ = 110; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" END + { $$ = 111; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + END + { $$ = 112; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" END + { $$ = 113; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" 
"48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" END + { $$ = 114; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" END + { $$ = 115; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" END + { $$ = 116; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" END + { $$ = 117; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" END + { $$ = 118; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" END + { $$ = 119; } +| "1" 
"2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" END + { $$ = 120; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" END + { $$ = 121; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" END + { $$ = 122; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" END + { $$ = 123; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + END + { $$ = 124; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" 
"50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" END + { $$ = 125; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" END + { $$ = 126; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "9Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -./calc.at:1482: cat stderr -syntax error -./calc.at:1479: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -stderr: - | - | +1 -./calc.at:1479: $PREPARSER ./calc input -input: -input: -stdout: - | - | +1 -1.11: syntax error -1.1-16: error: 2222 != 1 - | (1 + 1) / (1 - 1) -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1480: cat stderr -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '+' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) Entering state 8 Reading a token Next token is token '/' () Shifting token '/' () Entering state 22 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token 
')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: - | (!!) + (1 2) = 1 -./calc.at:1486: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -572. calc.at:1509: testing Calculator D ... -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1480: $PREPARSER ./calc input -syntax error, unexpected '+' -./calc.at:1469: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1489: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -input: -input: - | (1 + 1) / (1 - 1) -573. calc.at:1510: testing Calculator D %locations ... 
-./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: - | (* *) + (*) + (*) -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1478: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (7) -Shifting token "number" (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 
20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (5) -Shifting token "number" (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is 
token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by 
rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (4) -Shifting token "number" (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm 
exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm 
exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (256) -Shifting token "number" (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (64) -Shifting token "number" (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - 
$2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -./calc.at:1485: cat stderr -Starting parse +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +2" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" END + { $$ = 127; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" END + { $$ = 128; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" END + { $$ = 129; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" END + { $$ = 130; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" 
"103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" END + { $$ = 131; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" END + { $$ = 132; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" END + { $$ = 133; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" END + { $$ = 134; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" END + { $$ = 135; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" 
"84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + END + { $$ = 136; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" END + { $$ = 137; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" END + { $$ = 138; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" END + { $$ = 139; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" END + { $$ = 140; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" 
"33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" END + { $$ = 141; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" END + { $$ = 142; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" END + { $$ = 143; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" END + { $$ = 144; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" 
"108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" END + { $$ = 145; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" END + { $$ = 146; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" END + { $$ = 147; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "./calc.at:1480: $PREPARSER ./calc input +133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + END + { $$ = 148; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + 
"149" END + { $$ = 149; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" END + { $$ = 150; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" END + { $$ = 151; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" END + { $$ = 152; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" END + { $$ = 153; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" 
"30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" END + { $$ = 154; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" END + { $$ = 155; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" END + { $$ = 156; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" END + { $$ = 157; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" 
"44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" END + { $$ = 158; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" END + { $$ = 159; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + END + { $$ = 160; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" END + { $$ = 161; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" 
"39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" END + { $$ = 162; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" END + { $$ = 163; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" END + { $$ = 164; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" END + { $$ = 165; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" 
"14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" END + { $$ = 166; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" END + { $$ = 167; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" END + { $$ = 168; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" 
"147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" END + { $$ = 169; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" END + { $$ = 170; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" END + { $$ = 171; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + END + { $$ = 172; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" 
"99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" END + { $$ = 173; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" END + { $$ = 174; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - | error -error: null divisor -input: -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (7) -Shifting token "number" (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token 
'=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (5) -Shifting token "number" (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next 
token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next 
token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (4) -Shifting token "number" (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" 
(2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (256) -Shifting token "number" (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) 
-Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (64) -Shifting token "number" (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1482: cat stderr -./calc.at:1479: cat stderr -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: $PREPARSER ./calc input -stderr: -input: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: $PREPARSER ./calc /dev/null -./calc.at:1482: $PREPARSER ./calc /dev/null -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - | 1 2 -error: null divisor -./calc.at:1489: $PREPARSER ./calc input -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -558. 
calc.at:1485: ok -stderr: -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -stderr: -./calc.at:1480: cat stderr -syntax error -syntax error, unexpected end of input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1480: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -stderr: -syntax error, unexpected end of input -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -stderr: -572. calc.at:1509: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: cat stderr - skipped (calc.at:1509) -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1486: cat stderr -545. calc.at:1469: ./calc.at:1476: cat stderr - ok -./calc.at:1478: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: -./calc.at:1489: cat stderr - | 1 + 2 * 3 + !+ ++ -input: -./calc.at:1482: cat stderr - | (* *) + (*) + (*) -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1479: cat stderr -./calc.at:1486: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1480: cat stderr -stderr: -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -input: -./calc.at:1476: $PREPARSER ./calc input - -573. calc.at:1510: Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -input: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1482: $PREPARSER ./calc input -stderr: -./calc.at:1489: $PREPARSER ./calc input -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -syntax error - | (* *) + (*) + (*) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - skipped (calc.at:1510) -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -syntax error -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 
-Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -574. calc.at:1512: testing Calculator D parse.error=detailed api.prefix={calc} %verbose ... 
-Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering 
state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing 
stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1512: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -575. calc.at:1514: testing Calculator D %debug ... -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -input: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - | 1 + 2 * 3 + !+ ++ - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1479: cat stderr -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -./calc.at:1489: cat stderr -stderr: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -stderr: -./calc.at:1486: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1479: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ -577. calc.at:1517: testing Calculator D %locations parse.error=custom ... -input: -./calc.at:1480: $PREPARSER ./calc input -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1482: $PREPARSER ./calc input -stderr: - | 1 + 2 * 3 + !- ++ - | error -stderr: -syntax error -./calc.at:1489: $PREPARSER ./calc input -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -stderr: - | (#) + (#) = 2222 -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1478: $PREPARSER ./calc input -576. calc.at:1516: testing Calculator D parse.error=custom ... 
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | 1 + 2 * 3 + !- ++ -stderr: -./calc.at:1480: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -574. calc.at:1512: stderr: - skipped (calc.at:1512) -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -stderr: - -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: $PREPARSER ./calc /dev/null -./calc.at:1489: cat stderr -input: -./calc.at:1486: cat stderr -stderr: -syntax error -./calc.at:1482: cat stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -input: -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1478: cat stderr - | 1 = 2 = 3 -./calc.at:1489: $PREPARSER ./calc input -stderr: -input: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -575. calc.at:1514: stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1486: $PREPARSER ./calc input -syntax error -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -stderr: -input: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - skipped (calc.at:1514) - | (- *) + (1 2) = 1 - | (#) + (#) = 2222 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: cat stderr -stderr: -stderr: -./calc.at:1482: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () - -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -577. 
calc.at:1517: Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | (#) + (#) = 2222 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: - skipped (calc.at:1517) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -578. calc.at:1518: testing Calculator D %locations parse.error=detailed ... -576. calc.at:1516: ./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y - -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: cat stderr -./calc.at:1476: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - skipped (calc.at:1516) -syntax error: invalid character: '#' -syntax error: invalid character: '#' -input: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 - | (* *) + (*) + (*) -./calc.at:1478: cat stderr -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1486: cat stderr -input: -stderr: -stderr: - -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -input: -./calc.at:1482: cat stderr -./calc.at:1476: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' - | (1 + #) = 1111 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1478: $PREPARSER ./calc input -stderr: -stderr: -stderr: -./calc.at:1480: cat stderr -1.6: syntax error: invalid character: '#' -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1482: $PREPARSER ./calc input -input: -stderr: - | (1 + #) = 1111 -stderr: -1.6: syntax error: invalid character: '#' -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -syntax error: invalid character: '#' -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1489: cat stderr -syntax error: invalid character: '#' -input: -579. calc.at:1519: testing Calculator D %locations parse.error=simple ... -./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y - | 1 + 2 * 3 + !+ ++ -./calc.at:1479: $PREPARSER ./calc input -578. calc.at:1518: ./calc.at:1489: $PREPARSER ./calc /dev/null -./calc.at:1486: cat stderr - skipped (calc.at:1518) -stderr: -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -580. calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose ... -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -input: -./calc.at:1476: cat stderr - | (1 + # + 1) = 1111 -input: -stderr: -./calc.at:1486: $PREPARSER ./calc input - -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () - | (# + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -stderr: -input: -./calc.at:1482: cat stderr -./calc.at:1479: $PREPARSER ./calc input -581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose ... 
-Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y - | (!!) 
+ (1 2) = 1 -stderr: -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -stderr: - | 1 + 2 * 3 + !+ ++ -stderr: -syntax error -error: 2222 != 1 -1.2: syntax error: invalid character: '#' -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (# + 1) = 1111 -stderr: -syntax error -error: 2222 != 1 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1489: cat stderr -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1489: $PREPARSER ./calc input -stderr: -./calc.at:1479: cat stderr -./calc.at:1486: cat stderr -syntax error: invalid character: '#' -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -579. calc.at:1519: stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - skipped (calc.at:1519) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ - | (#) + (#) = 2222 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1479: $PREPARSER ./calc input -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -input: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) - | (1 + # + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1476: cat stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -580. calc.at:1520: ./calc.at:1486: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -1.6: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (calc.at:1520) -input: -stderr: -stderr: - -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1480: cat stderr -./calc.at:1476: $PREPARSER ./calc input -582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed ... 
-./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -1.6: syntax error: invalid character: '#' -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) - | (1 + # + 1) = 1111 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -581. calc.at:1521: -./calc.at:1489: cat stderr -./calc.at:1480: $PREPARSER ./calc input -syntax error -syntax error -error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - skipped (calc.at:1521) -stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -syntax error -syntax error -error: 2222 != 1 -input: - | (!!) + (1 2) = 1 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $PREPARSER ./test -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error: invalid character: '#' -stderr: - -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' 
() --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1478: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: cat stderr -======== Testing with C++ standard flags: '' -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1478: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering 
state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1486: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1479: $PREPARSER ./calc input -input: -583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom ... -./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: - | (#) + (#) = 2222 -560. calc.at:1486: ok -./calc.at:1482: $PREPARSER ./calc input -1.11-17: error: null divisor -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace ... -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1476: cat stderr -1.11-17: error: null divisor -stderr: -stderr: -syntax error: invalid character: '#' -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next 
token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1525: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1480: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -input: - | (* *) + (*) + (*) -stderr: -./calc.at:1476: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a 
token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -582. calc.at:1523: ./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr - | (1 + 1) / (1 - 1) - skipped (calc.at:1523) -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union ... -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -input: -stderr: -./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -error: null divisor -stderr: - | (- *) + (1 2) = 1 -./calc.at:1478: cat stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1479: cat stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -error: null divisor -550. calc.at:1478: ok - | (# + 1) = 1111 -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -583. calc.at:1524: skipped (calc.at:1524) -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1482: cat stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: -syntax error: invalid character: '#' - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - -./calc.at:1489: cat stderr - | (1 + #) = 1111 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1480: cat stderr -584. calc.at:1525: input: - skipped (calc.at:1525) - | 1 + 2 * 3 + !+ ++ -./calc.at:1476: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -554. calc.at:1480: ok -stderr: -stdout: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -stderr: -586. calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed ... 
-./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - | (* *) + (*) + (*) -./calc.at:1489: $PREPARSER ./calc input - -input: -./calc.at:1479: cat stderr - | 1 + 2 * 3 + !- ++ -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./calc.at:1476: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - -587. calc.at:1532: testing Calculator D api.push-pull=both ... -./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -588. calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... -stderr: -./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -589. calc.at:1544: testing Calculator Java ... -./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -stdout: -./types.at:139: $PREPARSER ./test -590. calc.at:1545: testing Calculator Java parse.error=custom ... -input: -./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -stderr: - | (1 + # + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -591. calc.at:1546: testing Calculator Java parse.error=detailed ... -./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -585. calc.at:1530: 586. calc.at:1531: ======== Testing with C++ standard flags: '' -588. calc.at:1533: stderr: -syntax error: invalid character: '#' - skipped (calc.at:1531) - skipped (calc.at:1530) - skipped (calc.at:1533) -./calc.at:1476: cat stderr - - - -593. calc.at:1548: testing Calculator Java %locations parse.error=custom ... -./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -589. calc.at:1544: ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - skipped (calc.at:1544) -592. calc.at:1547: testing Calculator Java parse.error=verbose ... -594. calc.at:1549: testing Calculator Java %locations parse.error=detailed ... -./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - -input: -./calc.at:1482: cat stderr - | (#) + (#) = 2222 -590. calc.at:1545: ./calc.at:1476: $PREPARSER ./calc input -587. calc.at:1532: stderr: -591. calc.at:1546: stderr: - skipped (calc.at:1545) -stdout: -syntax error: invalid character: '#' -./calc.at:1480: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - - skipped (calc.at:1532) - skipped (calc.at:1546) - - - -./calc.at:1489: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1480: $PREPARSER ./calc input -595. calc.at:1550: testing Calculator Java %locations parse.error=verbose ... -./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -598. calc.at:1554: testing Calculator Java api.push-pull=both ... -596. calc.at:1551: testing Calculator Java parse.trace parse.error=verbose ... 
-./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -input: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !+ ++ - | (# + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1489: $PREPARSER ./calc input -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -592. calc.at:1547: 594. calc.at:1549: stderr: -593. calc.at:1548: syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} ... -./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) - skipped (calc.at:1547) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - skipped (calc.at:1548) - skipped (calc.at:1549) - -input: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -595. calc.at:1550: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - | 1 2 -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: $PREPARSER ./calc input -stderr: -596. 
calc.at:1551: skipped (calc.at:1550) -stderr: -stderr: - skipped (calc.at:1551) -syntax error, unexpected number -./calc.at:1479: cat stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -598. calc.at:1554: stderr: -input: -syntax error, unexpected number -input: - | 1 + 2 * 3 + !- ++ - | (1 + 1) / (1 - 1) -./calc.at:1479: $PREPARSER ./calc input - skipped (calc.at:1554) -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1489: $PREPARSER ./calc input -stdout: -stderr: -./calc.at:1476: cat stderr -./calc.at:1492: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - -stderr: -error: null divisor -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -597. calc.at:1552: | (1 + #) = 1111 -stderr: -./calc.at:1476: $PREPARSER ./calc input -stderr: -stderr: - -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -error: null divisor -input: - skipped (calc.at:1552) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stdout: -stderr: -./calc.at:1492: $PREPARSER ./calc input -599. calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations ... -./types.at:139: ./check -./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both ... -./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -input: - | (1 + # + 1) = 1111 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1480: cat stderr - -stderr: -./calc.at:1482: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full ... -./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... -600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both ... 
-stderr: -stderr: - | 1//2 -./calc.at:1479: cat stderr -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: 
) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is 
token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) 
-Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = 
token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next 
token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token 
"number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (14.1: ) -Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: cat stderr -stderr: -552. calc.at:1479: ok -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp 
(2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = 
nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" 
(7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm 
exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) 
-Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" 
(13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 2 -./calc.at:1492: $PREPARSER ./calc input -input: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) - | (#) + (#) = 2222 -./calc.at:1476: cat stderr -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -stderr: -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: cat stderr -syntax error: invalid character: '#' -604. torture.at:132: testing Big triangle ... -./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -stderr: -605. torture.at:216: testing Big horizontal ... -input: -syntax error: invalid character: '#' -602. calc.at:1560: | (1 + 1) / (1 - 1) -./calc.at:1482: $PREPARSER ./calc input -./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77 -input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - | error - skipped (calc.at:1560) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -./calc.at:1480: $PREPARSER ./calc input -stderr: -601. calc.at:1557: Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -603. calc.at:1561: ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr -stderr: -input: -syntax error, unexpected invalid token -600. calc.at:1556: -stderr: +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" END + { $$ = 175; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" END + { $$ = 176; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" END + { $$ = 177; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" 
"112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" END + { $$ = 178; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" END + { $$ = 179; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" END + { $$ = 180; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" END + { $$ = 181; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" 
"13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" END + { $$ = 182; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89 skipped (torture.at:274) Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) +Next token is token ')' (1.7: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R7 G12 S26 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +LAC: initial context discarded due to shift Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = 
nterm exp (1.1-7: 2) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '/' () -Shifting token '/' () +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 22 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) +Next token is token ')' (1.17: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +LAC: initial context discarded due to shift Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) Entering state 31 +Stack now 0 8 22 31 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - skipped (calc.at:1557) - | 1//2 -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - skipped (calc.at:1556) - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - skipped (calc.at:1561) -599. 
calc.at:1555: ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - skipped (calc.at:1555) -606. torture.at:270: testing State number type: 128 states ... -./torture.at:270: ruby $abs_top_srcdir/tests/linear 128 >input.y || exit 77 - -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: - | (1 + #) = 1111 - -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: $PREPARSER ./calc input -stderr: - -stderr: ---- /dev/null 2025-09-09 11:24:00.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr 2025-09-09 13:30:40.182933937 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -syntax error, unexpected invalid token -./calc.at:1476: cat stderr -606. torture.at:270: stdout: -stderr: -%code top { /* -*- c -*- */ -/* Adjust to the compiler. - We used to do it here, but each time we add a new line, - we have to adjust all the line numbers in error messages. - It's simpler to use a constant include to a varying file. 
*/ -#include -} - -%define parse.error verbose -%debug -%{ -#include -#include -#define MAX 1000 -static int yylex (void); -#include - -/* !POSIX */ static void yyerror (const char *msg); -%} - -%token - t1 1 "1" - t2 2 "2" - t3 3 "3" - t4 4 "4" - t5 5 "5" - t6 6 "6" - t7 7 "7" - t8 8 "8" - t9 9 "9" - t10 10 "10" - t11 11 "11" - t12 12 "12" - t13 13 "13" - t14 14 "14" - t15 15 "15" - t16 16 "16" - t17 17 "17" - t18 18 "18" - t19 19 "19" - t20 20 "20" - t21 21 "21" - t22 22 "22" - t23 23 "23" - t24 24 "24" - t25 25 "25" - t26 26 "26" - t27 27 "27" - t28 28 "28" - t29 29 "29" - t30 30 "30" - t31 31 "31" - t32 32 "32" - t33 33 "33" - t34 34 "34" - t35 35 "35" - t36 36 "36" - t37 37 "37" - t38 38 "38" - t39 39 "39" - t40 40 "40" - t41 41 "41" - t42 42 "42" - t43 43 "43" - t44 44 "44" - t45 45 "45" - t46 46 "46" - t47 47 "47" - t48 48 "48" - t49 49 "49" - t50 50 "50" - t51 51 "51" - t52 52 "52" - t53 53 "53" - t54 54 "54" - t55 55 "55" - t56 56 "56" - t57 57 "57" - t58 58 "58" - t59 59 "59" - t60 60 "60" - t61 61 "61" - t62 62 "62" - t63 63 "63" - t64 64 "64" - t65 65 "65" - t66 66 "66" - t67 67 "67" - t68 68 "68" - t69 69 "69" - t70 70 "70" - t71 71 "71" - t72 72 "72" - t73 73 "73" - t74 74 "74" - t75 75 "75" - t76 76 "76" - t77 77 "77" - t78 78 "78" - t79 79 "79" - t80 80 "80" - t81 81 "81" - t82 82 "82" - t83 83 "83" - t84 84 "84" - t85 85 "85" - t86 86 "86" - t87 87 "87" - t88 88 "88" - t89 89 "89" - t90 90 "90" - t91 91 "91" - t92 92 "92" - t93 93 "93" - t94 94 "94" - t95 95 "95" - t96 96 "96" - t97 97 "97" - t98 98 "98" - t99 99 "99" - t100 100 "100" - t101 101 "101" - t102 102 "102" - t103 103 "103" - t104 104 "104" - t105 105 "105" - t106 106 "106" - t107 107 "107" - t108 108 "108" - t109 109 "109" - t110 110 "110" - t111 111 "111" - t112 112 "112" - t113 113 "113" - t114 114 "114" - t115 115 "115" - t116 116 "116" - t117 117 "117" - t118 118 "118" - t119 119 "119" - t120 120 "120" - t121 121 "121" - t122 122 "122" - t123 123 "123" - t124 124 "124" - t125 125 "125" - t126 126 "126" - t127 127 "127" - t128 128 "128" - t129 129 "129" - t130 130 "130" - t131 131 "131" - t132 132 "132" - t133 133 "133" - t134 134 "134" - t135 135 "135" - t136 136 "136" - t137 137 "137" - t138 138 "138" - t139 139 "139" - t140 140 "140" - t141 141 "141" - t142 142 "142" - t143 143 "143" - t144 144 "144" - t145 145 "145" - t146 146 "146" - t147 147 "147" - t148 148 "148" - t149 149 "149" - t150 150 "150" - t151 151 "151" - t152 152 "152" - t153 153 "153" - t154 154 "154" - t155 155 "155" - t156 156 "156" - t157 157 "157" - t158 158 "158" - t159 159 "159" - t160 160 "160" - t161 161 "161" - t162 162 "162" - t163 163 "163" - t164 164 "164" - t165 165 "165" - t166 166 "166" - t167 167 "167" - t168 168 "168" - t169 169 "169" - t170 170 "170" - t171 171 "171" - t172 172 "172" - t173 173 "173" - t174 174 "174" - t175 175 "175" - t176 176 "176" - t177 177 "177" - t178 178 "178" - t179 179 "179" - t180 180 "180" - t181 181 "181" - t182 182 "182" - t183 183 "183" - t184 184 "184" - t185 185 "185" - t186 186 "186" - t187 187 "187" - t188 188 "188" - t189 189 "189" - t190 190 "190" - t191 191 "191" - t192 192 "192" - t193 193 "193" - t194 194 "194" - t195 195 "195" - t196 196 "196" - t197 197 "197" - t198 198 "198" - t199 199 "199" - t200 200 "200" - t201 201 "201" - t202 202 "202" - t203 203 "203" - t204 204 "204" - t205 205 "205" - t206 206 "206" - t207 207 "207" - t208 208 "208" - t209 209 "209" - t210 210 "210" - t211 211 "211" - t212 212 "212" - t213 213 "213" - t214 214 "214" - t215 215 "215" - t216 216 "216" - 
t217 217 "217" - t218 218 "218" - t219 219 "219" - t220 220 "220" - t221 221 "221" - t222 222 "222" - t223 223 "223" - t224 224 "224" - t225 225 "225" - t226 226 "226" - t227 227 "227" - t228 228 "228" - t229 229 "229" - t230 230 "230" - t231 231 "231" - t232 232 "232" - t233 233 "233" - t234 234 "234" - t235 235 "235" - t236 236 "236" - t237 237 "237" - t238 238 "238" - t239 239 "239" - t240 240 "240" - t241 241 "241" - t242 242 "242" - t243 243 "243" - t244 244 "244" - t245 245 "245" - t246 246 "246" - t247 247 "247" - t248 248 "248" - t249 249 "249" - t250 250 "250" - t251 251 "251" - t252 252 "252" - t253 253 "253" - t254 254 "254" - t255 255 "255" - t256 256 "256" - t257 257 "257" - t258 258 "258" - t259 259 "259" - t260 260 "260" - t261 261 "261" - t262 262 "262" - t263 263 "263" - t264 264 "264" - t265 265 "265" - t266 266 "266" - t267 267 "267" - t268 268 "268" - t269 269 "269" - t270 270 "270" - t271 271 "271" - t272 272 "272" - t273 273 "273" - t274 274 "274" - t275 275 "275" - t276 276 "276" - t277 277 "277" - t278 278 "278" - t279 279 "279" - t280 280 "280" - t281 281 "281" - t282 282 "282" - t283 283 "283" - t284 284 "284" - t285 285 "285" - t286 286 "286" - t287 287 "287" - t288 288 "288" - t289 289 "289" - t290 290 "290" - t291 291 "291" - t292 292 "292" - t293 293 "293" - t294 294 "294" - t295 295 "295" - t296 296 "296" - t297 297 "297" - t298 298 "298" - t299 299 "299" - t300 300 "300" - t301 301 "301" - t302 302 "302" - t303 303 "303" - t304 304 "304" - t305 305 "305" - t306 306 "306" - t307 307 "307" - t308 308 "308" - t309 309 "309" - t310 310 "310" - t311 311 "311" - t312 312 "312" - t313 313 "313" - t314 314 "314" - t315 315 "315" - t316 316 "316" - t317 317 "317" - t318 318 "318" - t319 319 "319" - t320 320 "320" - t321 321 "321" - t322 322 "322" - t323 323 "323" - t324 324 "324" - t325 325 "325" - t326 326 "326" - t327 327 "327" - t328 328 "328" - t329 329 "329" - t330 330 "330" - t331 331 "331" - t332 332 "332" - t333 333 "333" - t334 334 "334" - t335 335 "335" - t336 336 "336" - t337 337 "337" - t338 338 "338" - t339 339 "339" - t340 340 "340" - t341 341 "341" - t342 342 "342" - t343 343 "343" - t344 344 "344" - t345 345 "345" - t346 346 "346" - t347 347 "347" - t348 348 "348" - t349 349 "349" - t350 350 "350" - t351 351 "351" - t352 352 "352" - t353 353 "353" - t354 354 "354" - t355 355 "355" - t356 356 "356" - t357 357 "357" - t358 358 "358" - t359 359 "359" - t360 360 "360" - t361 361 "361" - t362 362 "362" - t363 363 "363" - t364 364 "364" - t365 365 "365" - t366 366 "366" - t367 367 "367" - t368 368 "368" - t369 369 "369" - t370 370 "370" - t371 371 "371" - t372 372 "372" - t373 373 "373" - t374 374 "374" - t375 375 "375" - t376 376 "376" - t377 377 "377" - t378 378 "378" - t379 379 "379" - t380 380 "380" - t381 381 "381" - t382 382 "382" - t383 383 "383" - t384 384 "384" - t385 385 "385" - t386 386 "386" - t387 387 "387" - t388 388 "388" - t389 389 "389" - t390 390 "390" - t391 391 "391" - t392 392 "392" - t393 393 "393" - t394 394 "394" - t395 395 "395" - t396 396 "396" - t397 397 "397" - t398 398 "398" - t399 399 "399" - t400 400 "400" - t401 401 "401" - t402 402 "402" - t403 403 "403" - t404 404 "404" - t405 405 "405" - t406 406 "406" - t407 407 "407" - t408 408 "408" - t409 409 "409" - t410 410 "410" - t411 411 "411" - t412 412 "412" - t413 413 "413" - t414 414 "414" - t415 415 "415" - t416 416 "416" - t417 417 "417" - t418 418 "418" - t419 419 "419" - t420 420 "420" - t421 421 "421" - t422 422 "422" - t423 423 "423" - t424 424 "Starting parse -Entering 
state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R10 G8 S24 +Reducing stack by rule 10 (line 106): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -424" - t425 425 "425" - t426 426 "426" - t427 427 "427" - t428 428 "428" - t429 429 "429" - t430 430 "430" - t431 431 "431" - t432 432 "432" - t433 433 "433" - t434 434 "434" - t435 435 "435" - t436 436 "436" - t437 437 "437" - t438 438 "438" - t439 439 "439" - t440 440 "440" - t441 441 "441" - t442 442 "442" - t443 443 "443" - t444 444 "444" - t445 445 "445" - t446 446 "446" - t447 447 "447" - t448 448 "448" - t449 449 "449" - t450 450 "450" - t451 451 "451" - t452 452 "452" - t453 453 "453" - t454 454 "454" - t455 455 "455" - t456 456 "456" - t457 457 "457" - t458 458 "458" - t459 459 "459" - t460 460 "460" - t461 461 "461" - t462 462 "462" - t463 463 "463" - t464 464 "464" - t465 465 "465" - t466 466 "466" - t467 467 "467" - t468 468 "468" - t469 469 "469" - t470 470 "470" - t471 471 "471" - t472 472 "472" - t473 473 "473" - t474 474 "474" - t475 475 "475" - t476 476 "476" - t477 477 "477" - t478 478 "478" - t479 479 "479" - t480 480 "480" - t481 481 "481" - t482 482 "482" - t483 483 "483" - t484 484 "484" - t485 485 "485" - t486 486 "486" - t487 487 "487" - t488 488 "488" - t489 489 "489" - t490 490 "490" - t491 491 "491" - t492 492 "492" - t493 493 "493" - t494 494 "494" - t495 495 "495" - t496 496 "496" - t497 497 "497" - t498 498 "498" - t499 499 "499" - t500 500 "500" - t501 501 "501" - t502 502 "502" - t503 503 "503" - t504 504 "504" - t505 505 "505" - t506 506 "506" - t507 507 "507" - t508 508 "508" - t509 509 "509" - t510 510 "510" - t511 511 "511" - t512 512 "512" - t513 513 "513" - t514 514 "514" - t515 515 "515" - t516 516 "516" - t517 517 "517" - t518 518 "518" - t519 519 "519" - t520 520 "520" - t521 521 "521" - t522 522 "522" - t523 523 "523" - t524 524 "524" - t525 525 "525" - t526 526 "526" - t527 527 "527" - t528 528 "528" - t529 529 "529" - t530 530 "530" - t531 531 "531" - t532 532 "532" - t533 533 "533" - t534 534 "534" - t535 535 "535" - t536 536 "536" - t537 537 "537" - t538 538 "538" - t539 539 "539" - t540 540 "540" - t541 541 "541" - t542 542 "542" - t543 543 "543" - t544 544 "544" - t545 545 "545" - t546 546 "546" - t547 547 "547" - t548 548 "548" - t549 549 "549" - t550 550 "550" - t551 551 "551" - t552 552 "552" - t553 553 "553" - t554 554 "554" - t555 555 "555" - t556 556 "556" - t557 557 "557" - t558 558 "558" - t559 559 "559" - t560 560 "560" - t561 561 "561" - t562 562 "562" - t563 563 "563" - t564 564 "564" - t565 565 "565" - t566 566 "566" - t567 567 "567" - t568 568 "568" - t569 569 "569" - t570 570 "570" - t571 571 "571" - t572 572 "572" - t573 573 "573" - t574 574 "574" - t575 575 "575" - t576 576 "576" - t577 577 "577" - t578 578 "578" - t579 579 "579" - t580 580 "580" - t581 581 "581" - t582 582 "582" - t583 583 "583" - t584 584 "584" - t585 585 "585" - t586 586 "586" - t587 587 "587" - t588 588 "588" - t589 589 "589" - t590 590 "590" - t591 591 "591" - t592 592 "592" - t593 593 "593" - t594 594 "594" - t595 595 "595" - t596 596 "596" - t597 597 "597" - t598 598 "598" - t599 599 "599" - t600 600 "600" - t601 601 "601" - t602 602 "602" - t603 603 "603" - t604 604 "604" - t605 605 "605" - t606 606 "606" - t607 607 "607" - t608 608 "608" - t609 609 "609" - t610 610 "610" - t611 611 "611" - t612 612 "612" - t613 613 "613" - t614 614 "614" - t615 615 "615" - t616 616 "616" - t617 617 "617" - t618 618 "618" - t619 619 "619" - t620 620 "620" - t621 621 
"621" - t622 622 "622" - t623 623 "623" - t624 624 "624" - t625 625 "625" - t626 626 "626" - t627 627 "627" - t628 628 "628" - t629 629 "629" - t630 630 "630" - t631 631 "631" - t632 632 "632" - t633 633 "633" - t634 634 "634" - t635 635 "635" - t636 636 "636" - t637 637 "637" - t638 638 "638" - t639 639 "639" - t640 640 "640" - t641 641 "641" - t642 642 "642" - t643 643 "643" - t644 644 "644" - t645 645 "645" - t646 646 "646" - t647 647 "647" - t648 648 "648" - t649 649 "649" - t650 650 "650" - t651 651 "651" - t652 652 "652" - t653 653 "653" - t654 654 "654" - t655 655 "655" - t656 656 "656" - t657 657 "657" - t658 658 "658" - t659 659 "659" - t660 660 "660" - t661 661 "661" - t662 662 "662" - t663 663 "663" - t664 664 "664" - t665 665 "665" - t666 666 "666" - t667 667 "667" - t668 668 "668" - t669 669 "669" - t670 670 "670" - t671 671 "671" - t672 672 "672" - t673 673 "673" - t674 674 "674" - t675 675 "675" - t676 676 "676" - t677 677 "677" - t678 678 "678" - t679 679 "679" - t680 680 "680" - t681 681 "681" - t682 682 "682" - t683 683 "683" - t684 684 "684" - t685 685 "685" - t686 686 "686" - t687 687 "687" - t688 688 "688" - t689 689 "689" - t690 690 "690" - t691 691 "691" - t692 692 "692" - t693 693 "693" - t694 694 "694" - t695 695 "695" - t696 696 "696" - t697 697 "697" - t698 698 "698" - t699 699 "699" - t700 700 "700" - t701 701 "701" - t702 702 "702" - t703 703 "703" - t704 704 "704" - t705 705 "705" - t706 706 "706" - t707 707 "707" - t708 708 "708" - t709 709 "709" - t710 710 "710" - t711 711 "711" - t712 712 "712" - t713 713 "713" - t714 714 "714" - t715 715 "715" - t716 716 "716" - t717 717 "717" - t718 718 "718" - t719 719 "719" - t720 720 "720" - t721 721 "721" - t722 722 "722" - t723 723 "723" - t724 724 "724" - t725 725 "725" - t726 726 "726" - t727 727 "727" - t728 728 "728" - t729 729 "729" - t730 730 "730" - t731 731 "731" - t732 732 "732" - t733 733 "733" - t734 734 "734" - t735 735 "735" - t736 736 "736" - t737 737 "737" - t738 738 "738" - t739 739 "739" - t740 740 "740" - t741 741 "741" - t742 742 "742" - t743 743 "743" - t744 744 "744" - t745 745 "745" - t746 746 "746" - t747 747 "747" - t748 748 "748" - t749 749 "749" - t750 750 "750" - t751 751 "751" - t752 752 "752" - t753 753 "753" - t754 754 "754" - t755 755 "755" - t756 756 "756" - t757 757 "757" - t758 758 "758" - t759 759 "759" - t760 760 "760" - t761 761 "761" - t762 762 "762" - t763 763 "763" - t764 764 "764" - t765 765 "765" - t766 766 "766" - t767 767 "767" - t768 768 "768" - t769 769 "769" - t770 770 "770" - t771 771 "771" - t772 772 "772" - t773 773 "773" - t774 774 "774" - t775 775 "775" - t776 776 "776" - t777 777 "777" - t778 778 "778" - t779 779 "779" - t780 780 "780" - t781 781 "781" - t782 782 "782" - t783 783 "783" - t784 784 "784" - t785 785 "785" - t786 786 "786" - t787 787 "787" - t788 788 "788" - t789 789 "789" - t790 790 "790" - t791 791 "791" - t792 792 "792" - t793 793 "793" - t794 794 "794" - t795 795 "795" - t796 796 "796" - t797 797 "797" - t798 798 "798" - t799 799 "799" - t800 800 "800" - t801 801 "801" - t802 802 "802" - t803 803 "803" - t804 804 "804" - t805 805 "805" - t806 806 "806" - t807 807 "807" - t808 808 "808" - t809 809 "809" - t810 810 "810" - t811 811 "811" - t812 812 "812" - t813 813 "813" - t814 814 "814" - t815 815 "815" - t816 816 "816" - t817 817 "817" - t818 818 "818" - t819 819 "819" - t820 820 "820" - t821 821 "821" - t822 822 "822" - t823 823 "823" - t824 824 "824" - t825 825 "825" - t826 826 "826" - t827 827 "827" - t828 828 "828" - t829 829 "829" - t830 830 
"830" - t831 831 "831" - t832 832 "832" - t833 833 "833" - t834 834 "834" - t835 835 "835" - t836 836 "836" - t837 837 "837" - t838 838 "838" - t839 839 "839" - t840 840 "840" - t841 841 "841" - t842 842 "842" - t843 843 "843" - t844 844 "844" - t845 845 "845" - t846 846 "846" - t847 847 "847" - t848 848 "848" - t849 849 "849" - t850 850 "850" - t851 851 "851" - t852 852 "852" - t853 853 "853" - t854 854 "854" - t855 855 "855" - t856 856 "856" - t857 857 "857" - t858 858 "858" - t859 859 "859" - t860 860 "860" - t861 861 "861" - t862 862 "862" - t863 863 "863" - t864 864 "864" - t865 865 "865" - t866 866 "866" - t867 867 "867" - t868 868 "868" - t869 869 "869" - t870 870 "870" - t871 871 "871" - t872 872 "872" - t873 873 "873" - t874 874 "874" - t875 875 "875" - t876 876 "876" - t877 877 "877" - t878 878 "878" - t879 879 "879" - t880 880 "880" - t881 881 "881" - t882 882 "882" - t883 883 "883" - t884 884 "884" - t885 885 "885" - t886 886 "886" - t887 887 "887" - t888 888 "888" - t889 889 "889" - t890 890 "890" - t891 891 "891" - t892 892 "892" - t893 893 "893" - t894 894 "894" - t895 895 "895" - t896 896 "896" - t897 897 "897" - t898 898 "898" - t899 899 "899" - t900 900 "900" - t901 901 "901" - t902 902 "902" - t903 903 "903" - t904 904 "904" - t905 905 "905" - t906 906 "906" - t907 907 "907" - t908 908 "908" - t909 909 "909" - t910 910 "910" - t911 911 "911" - t912 912 "912" - t913 913 "913" - t914 914 "914" - t915 915 "915" - t916 916 "916" - t917 917 "917" - t918 918 "918" - t919 919 "919" - t920 920 "920" - t921 921 "921" - t922 922 "922" - t923 923 "923" - t924 924 "924" - t925 925 "925" - t926 926 "926" - t927 927 "927" - t928 928 "928" - t929 929 "929" - t930 930 "930" - t931 931 "931" - t932 932 "932" - t933 933 "933" - t934 934 "934" - t935 935 "935" - t936 936 "936" - t937 937 "937" - t938 938 "938" - t939 939 "939" - t940 940 "940" - t941 941 "941" - t942 942 "942" - t943 943 "943" - t944 944 "944" - t945 945 "945" - t946 946 "946" - t947 947 "947" - t948 948 "948" - t949 949 "949" - t950 950 "950" - t951 951 "951" - t952 952 "952" - t953 953 "953" - t954 954 "954" - t955 955 "955" - t956 956 "956" - t957 957 "957" - t958 958 "958" - t959 959 "959" - t960 960 "960" - t961 961 "961" - t962 962 "962" - t963 963 "963" - t964 964 "964" - t965 965 "965" - t966 966 "966" - t967 967 "967" - t968 968 "968" - t969 969 "969" - t970 970 "970" - t971 971 "971" - t972 972 "972" - t973 973 "973" - t974 974 "974" - t975 975 "975" - t976 976 "976" - t977 977 "977" - t978 978 "978" - t979 979 "979" - t980 980 "980" - t981 981 "981" - t982 982 "982" - t983 983 "983" - t984 984 "984" - t985 985 "985" - t986 986 "986" - t987 987 "987" - t988 988 "988" - t989 989 "989" - t990 990 "990" - t991 991 "991" - t992 992 "992" - t993 993 "993" - t994 994 "994" - t995 995 "995" - t996 996 "996" - t997 997 "997" - t998 998 "998" - t999 999 "999" - t1000 1000 "1000" - -%% -exp: "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" 
"123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" "200" "201" "202" "203" "204" "205" "206" "207" "208" - "209" "210" "211" "212" "213" "214" "215" "216" "217" "218" "219" "220" - "221" "222" "223" "224" "225" "226" "227" "228" "229" "230" "231" "232" - "233" "234" "235" "236" "237" "238" "239" "240" "241" "242" "243" "244" - "245" "246" "247" "248" "249" "250" "251" "252" "253" "254" "255" "256" - "257" "258" "259" "260" "261" "262" "263" "264" "265" "266" "267" "268" - "269" "270" "271" "272" "273" "274" "275" "276" "277" "278" "279" "280" - "281" "282" "283" "284" "285" "286" "287" "288" "289" "290" "291" "292" - "293" "294" "295" "296" "297" "298" "299" "300" "301" "302" "303" "304" - "305" "306" "307" "308" "309" "310" "311" "312" "313" "314" "315" "316" - "317" "318" "319" "320" "321" "322" "323" "324" "325" "326" "327" "328" - "329" "330" "331" "332" "333" "334" "335" "336" "337" "338" "339" "340" - "341" "342" "343" "344" "345" "346" "347" "348" "349" "350" "351" "352" - "353" "354" "355" "356" "357" "358" "359" "360" "361" "362" "363" "364" - "365" "366" "367" "368" "369" "370" "371" "372" "373" "374" "375" "376" - "377" "378" "379" "380" "381" "382" "383" "384" "385" "386" "387" "388" - "389" "390" "391" "392" "393" "394" "395" "396" "397" "398" "399" "400" - "401" "402" "403" "404" "405" "406" "407" "408" "409" "410" "411" "412" - "413" "414" "415" "416" "417" "418" "419" "420" "421" "422" "423" "424" - "425" "426" "427" "428" "429" "430" "431" "432" "433" "434" "435" "436" - "437" "438" "439" "440" "441" "442" "443" "444" "445" "446" "447" "448" - "449" "450" "451" "452" "453" "454" "455" "456" "457" "458" "459" "460" - "461" "462" "463" "464" "465" "466" "467" "468" "469" "470" "471" "472" - "473" "474" "475" "476" "477" "478" "479" "480" "481" "482" "483" "484" - "485" "486" "487" "488" "489" "490" "491" "492" "493" "494" "495" "496" - "497" "498" "499" "500" "501" "502" "503" "504" "505" "506" "507" "508" - "509" "510" "511" "512" "513" "514" "515" "516" "517" "518" "519" "520" - "521" "522" "523" "524" "525" "526" "527" "528" "529" "530" "531" "532" - "533" "534" "535" "536" "537" "538" "539" "540" "541" "542" "543" "544" - "545" "546" "547" "548" "549" "550" "551" "552" "553" "554" "555" "556" - "557" "558" "559" "560" "561" "562" "563" "564" "565" "566" "567" "568" - "569" "570" "571" "572" "573" "574" "575" "576" "577" "578" "579" "580" - "581" "582" "583" "584" "585" "586" "587" "588" "589" "590" "591" "592" - "593" "594" "595" "596" "597" "598" "599" "600" "601" "602" "603" "604" - "605" "606" "607" "608" "609" "610" "611" "612" "613" "614" "615" "616" - "617" "618" "619" "620" "621" "622" "623" "624" "625" "626" "627" "628" - "629" "630" "631" "632" "633" "634" "635" "636" "637" "638" "639" "640" - "641" "642" "643" "644" "645" "646" "647" "648" "649" "650" "651" "652" - "653" "654" "655" "656" "657" "658" "659" "660" "661" "662" "663" "664" - "665" "666" "667" "668" "669" "670" "671" "672" "673" "674" "675" "676" - "677" "678" "679" "680" "681" "682" "683" "684" "685" "686" "687" "688" - "689" "690" "691" "692" "693" "694" "695" "696" "697" "698" 
"699" "700" - "701" "702" "703" "704" "705" "706" "707" "708" "709" "710" "711" "712" - "713" "714" "715" "716" "717" "718" "719" "720" "721" "722" "723" "724" - "725" "726" "727" "728" "729" "730" "731" "732" "733" "734" "735" "736" - "737" "738" "739" "740" "741" "742" "743" "744" "745" "746" "747" "748" - "749" "750" "751" "752" "753" "754" "755" "756" "757" "758" "759" "760" - "761" "762" "763" "764" "765" "766" "767" "768" "769" "770" "771" "772" - "773" "774" "775" "776" "777" "778" "779" "780" "781" "782" "783" "784" - "785" "786" "787" "788" "789" "790" "791" "792" "793" "794" "795" "796" - "797" "798" "799" "800" "801" "802" "803" "804" "805" "806" "807" "808" - "809" "810" "811" "812" "813" "814" "815" "816" "817" "818" "819" "820" - "821" "822" "823" "824" "825" "826" "827" "828" "829" "830" "831" "832" - "833" "834" "835" "836" "837" "838" "839" "840" "841" "842" "843" "844" - "845" "846" "847" "848" "849" "850" "851" "852" "853" "854" "855" "856" - "857" "858" "859" "860" "861" "862" "863" "864" "865" "866" "867" "868" - "869" "870" "871" "872" "873" "874" "875" "876" "877" "878" "879" "880" - "881" "882" "883" "884" "885" "886" "887" "888" "889" "890" "891" "892" - "893" "894" "895" "896" "897" "898" "899" "900" "901" "902" "903" "904" - "905" "906" "907" "908" "909" "910" "911" "912" "913" "914" "915" "916" - "917" "918" "919" "920" "921" "922" "923" "924" "925" "926" "927" "928" - "929" "930" "931" "932" "933" "934" "935" "936" "937" "938" "939" "940" - "941" "942" "943" "944" "945" "946" "947" "948" "949" "950" "951" "952" - "953" "954" "955" "956" "957" "958" "959" "960" "961" "962" "963" "964" - "965" "966" "967" "968" "969" "970" "971" "972" "973" "974" "975" "976" - "977" "978" "979" "980" "981" "982" "983" "984" "985" "986" "987" "988" - "989" "990" "991" "992" "993" "994" "995" "996" "997" "998" "999" "1000" - ; +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" END + { $$ = 183; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" 
"176" "177" "178" "179" "180" "181" "182" "183" "184" + END + { $$ = 184; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" END + { $$ = 185; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" END + { $$ = 186; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" END + { $$ = 187; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" 
"61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" END + { $$ = 188; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" END + { $$ = 189; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" END + { $$ = 190; } +| "1" "2" "3" "4" " skipped (calc.at:1561) +5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" 
"116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" END + { $$ = 191; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" END + { $$ = 192; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" END + { $$ = 193; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" 
"160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" END + { $$ = 194; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" END + { $$ = 195; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + END + { $$ = 196; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" 
"194" "195" "196" + "197" END + { $$ = 197; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" END + { $$ = 198; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" END + { $$ = 199; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" "200" END + { $$ = 200; } +; %% -#include @@ -192365,11 +182588,17 @@ static int yylex (void) { - static int counter 
= 1; - if (counter <= MAX) - return counter++; - assert (counter++ == MAX + 1); - return 0; + static int inner = 1; + static int outer = 0; + if (outer > MAX) + return 0; + else if (inner > outer) + { + inner = 1; + ++outer; + return END; + } + return inner++; } #include /* getenv. */ #include /* strcmp. */ @@ -192380,217 +182609,47 @@ (void) argv; return yyparse (); } - skipped (torture.at:270) -./calc.at:1482: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1476: $PREPARSER ./calc input - -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1492: cat stderr -556. 
calc.at:1482: ok -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1480: cat stderr - | error -./calc.at:1492: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' + skipped (calc.at:1560) +./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +536. calc.at:1449: ok + + + + + + + input: - | 1 = 2 = 3 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error, unexpected '=' - -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -607. torture.at:271: testing State number type: 129 states ... -./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77 ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1469: cat stderr ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr -stderr: -syntax error, unexpected '=' ---- /dev/null 2025-09-09 11:24:00.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr 2025-09-09 13:30:40.318934989 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found -607. torture.at:271: input: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -611. torture.at:275: testing State number type: 65536 states ... -./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77 - | (# + 1) = 1111 - skipped (torture.at:271) -./calc.at:1476: cat stderr -610. torture.at:274: testing State number type: 32768 states ... -./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77 -608. torture.at:272: testing State number type: 256 states ... 
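
Annotation (not part of the log): the huge grammar listed above enumerates one rule per sequence length ("1" "2" ... "N" END { $$ = N; }), and the yylex hunk just before it replaces the old single-counter scanner with an inner/outer pair. The standalone C sketch below only mirrors that new control flow so the token stream is easier to picture; MAX and the END value are placeholders here (in the generated test file they come from the grammar), and the sketch prints tokens instead of returning them to yyparse.

#include <stdio.h>

#define MAX 4     /* the generated test uses a much larger MAX */
#define END -1    /* stand-in for the grammar's END token code */

/* Same control flow as the regenerated yylex shown in the hunk above:
   for each outer = 0 .. MAX it yields the tokens 1 .. outer followed by
   END, then finally 0 (end of input). */
static int
next_token (void)
{
  static int inner = 1;
  static int outer = 0;
  if (outer > MAX)
    return 0;
  else if (inner > outer)
    {
      inner = 1;
      ++outer;
      return END;
    }
  return inner++;
}

int
main (void)
{
  for (int tok = next_token (); tok != 0; tok = next_token ())
    if (tok == END)
      printf ("END\n");
    else
      printf ("\"%d\" ", tok);
  return 0;
}
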
-./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77 ---- /dev/null 2025-09-09 11:24:00.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr 2025-09-09 13:30:40.342935175 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found -./calc.at:1489: $PREPARSER ./calc input ---- /dev/null 2025-09-09 11:24:00.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr 2025-09-09 13:30:40.350935237 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found -608. torture.at:272: --- /dev/null 2025-09-09 11:24:00.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr 2025-09-09 13:30:40.342935175 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found -609. torture.at:273: testing State number type: 257 states ... -./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77 -./calc.at:1492: cat stderr -612. torture.at:276: testing State number type: 65537 states ... -./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77 -611. torture.at:275: input: - -610. torture.at:274: | (1 + 1) / (1 - 1) - skipped (torture.at:272) -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -192600,281 +182659,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 ---- /dev/null 2025-09-09 11:24:00.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr 2025-09-09 13:30:40.362935329 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found - skipped (torture.at:275) ---- /dev/null 2025-09-09 11:24:00.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr 2025-09-09 13:30:40.358935298 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found -./calc.at:1476: $PREPARSER ./calc input -609. 
torture.at:273: input: +input: +./calc.at:1485: cat stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - skipped (torture.at:274) - | 1 = 2 = 3 -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1480: cat stderr -612. 
torture.at:276: skipped (torture.at:273) stderr: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 error: null divisor - - skipped (torture.at:276) -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: - - -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) - -input: -stderr: - -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1480: $PREPARSER ./calc input -error: null divisor -613. torture.at:385: testing Many lookahead tokens ... 
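
Annotation (not part of the log): the skipped groups 607-612 all exercise "State number type: N states" for N = 129, 256, 257, 32768, 65536 and 65537, i.e. counts sitting just below and just above the 8- and 16-bit limits for the largest state number; on this builder they are skipped only because the ruby-based "linear" grammar generator is unavailable. The helper below is hypothetical and not from bison; it merely shows why those particular boundary values are the interesting ones.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical helper: report the narrowest common integer width that
   could hold the largest state number (nstates - 1).  Only meant to show
   that 129/256/257/32768/65536/65537 straddle the 8- and 16-bit limits. */
static const char *
narrowest_width (long nstates)
{
  long maxstate = nstates - 1;        /* states are numbered from 0 */
  if (maxstate <= INT8_MAX)   return "signed 8-bit";
  if (maxstate <= UINT8_MAX)  return "unsigned 8-bit";
  if (maxstate <= INT16_MAX)  return "signed 16-bit";
  if (maxstate <= UINT16_MAX) return "unsigned 16-bit";
  return "32-bit";
}

int
main (void)
{
  static const long probes[] = { 129, 256, 257, 32768, 65536, 65537 };
  for (unsigned i = 0; i < sizeof probes / sizeof probes[0]; ++i)
    printf ("%6ld states -> %s\n", probes[i], narrowest_width (probes[i]));
  return 0;
}
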
-stderr: +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: $PREPARSER ./calc input stderr: -./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77 -syntax error, unexpected '+' -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) +syntax error, unexpected number +error: 2222 != 1 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: cat stderr stderr: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '+' -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr input: - | (1 + # + 1) = 1111 -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -192884,219 +182700,189 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: $PREPARSER ./calc input -614. torture.at:485: testing Exploding the Stack Size with Alloca ... -616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ... -./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + | + | +1 +./calc.at:1485: $PREPARSER ./calc input +syntax error, unexpected number +error: 2222 != 1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1476: cat stderr +./calc.at:1451: $PREPARSER ./calc input stderr: -input: -618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ... 
-./calc.at:1480: cat stderr Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token ')' (1.7: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R7 G12 S26 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - | - | +1 -./calc.at:1492: $PREPARSER ./calc input -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./calc.at:1476: cat stderr -617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ... -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -./calc.at:1480: $PREPARSER ./calc /dev/null -stderr: -stdout: -615. torture.at:531: testing Exploding the Stack Size with Malloc ... -./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Stack now 0 8 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -619. existing.at:808: testing GNU Cim Grammar: LALR(1) ... 
-Starting parse -Entering state 0 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 22 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token ')' (1.17: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R10 G8 S24 +Reducing stack by rule 10 (line 106): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 
4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -547. calc.at:1476: ok -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +612. torture.at:276: testing State number type: 65537 states ... +./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77 +613. torture.at:385: testing Many lookahead tokens ... +./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77 +611. torture.at:275: testing State number type: 65536 states ... +./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77 +615. torture.at:531: testing Exploding the Stack Size with Malloc ... +./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +614. torture.at:485: testing Exploding the Stack Size with Alloca ... +616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ... +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +--- /dev/null 2026-10-11 17:46:22.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr 2026-10-12 20:01:51.260870317 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found +./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ... 
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +--- /dev/null 2026-10-11 17:46:22.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr 2026-10-12 20:01:51.264870317 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found +./calc.at:1446: cat stderr +stdout: %define parse.error verbose %debug %{ @@ -196252,33 +186038,147 @@ (void) argv; return yyparse (); } -syntax error, unexpected end of input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +input: +./calc.at:1477: cat stderr +input: +./calc.at:1478: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./torture.at:494: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1479: cat stderr +./torture.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +611. torture.at:275: | 1//2 +./calc.at:1445: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1448: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1443: cat stderr +./calc.at:1469: $PREPARSER ./calc input +612. 
torture.at:276: stderr: +stderr: +stderr: +stderr: stderr: + skipped (torture.at:276) +error: null divisor + skipped (torture.at:275) Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1476: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -./calc.at:1489: "$PERL" -pi -e 'use strict; +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +input: +stderr: +input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +550. calc.at:1478: | (* *) + (*) + (*) + ok +stdout: + | error +stdout: +548. calc.at:1477: stderr: +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input + ok + | (!!) + (1 2) = 1 +input: +stderr: +input: +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1477: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +syntax error +syntax error +error: 2222 != 1 my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" @@ -196286,8 +186186,73 @@ }eg ' expout || exit 77 stderr: -syntax error, unexpected end of input -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1491: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. 
+ (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' + | (- *) + (1 2) = 1 +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1492: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stderr: +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1476: $PREPARSER ./calc input +stderr: +syntax error, unexpected invalid token +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -196297,9 +186262,159 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr +./calc.at:1478: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
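
Annotation (not part of the log): the "$PERL" -pi one-liner that recurs throughout these logs normalizes the expected-output files. It rewrites "syntax error on token [X] (expected: [A] [B] ...)" into the C-skeleton wording "syntax error, unexpected X, expecting A or B", and it keeps the "expecting" list only when there are between two and four expected tokens (the `$#exps && $#exps < 4` guard), dropping it otherwise. The C fragment below merely restates that join/cutoff logic for readability; the test suite does this in Perl, not C.

#include <stdio.h>

/* Illustrative restatement of the Perl guard above, not test-suite code:
   print the "expecting ..." tail only for 2..4 expected tokens. */
static void
normalized_message (const char *unexpected, const char *expected[], int n)
{
  printf ("syntax error, unexpected %s", unexpected);
  if (2 <= n && n <= 4)
    {
      printf (", expecting ");
      for (int i = 0; i < n; ++i)
        printf ("%s%s", i ? " or " : "", expected[i]);
    }
  putchar ('\n');
}

int
main (void)
{
  const char *exps[] = { "number", "'-'", "'('", "'!'" };
  /* Matches the message seen above for the "1//2" input:
     syntax error, unexpected '/', expecting number or '-' or '(' or '!'  */
  normalized_message ("'/'", exps, 4);
  return 0;
}
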
+stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1476: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; +stderr: +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: + +syntax error +syntax error +error: 2222 != 1 +input: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1491: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1492: $PREPARSER ./calc input + + +input: +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -196309,615 +186424,1206 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./torture.at:494: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS input: -./calc.at:1480: cat stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1443: $PREPARSER ./calc input | (1 + 1) / (1 - 1) -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1492: cat stderr +./calc.at:1445: $PREPARSER ./calc input +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +1.6: syntax error: invalid character: '#' +stderr: +input: +input: +./calc.at:1489: cat stderr +syntax error +syntax error +error: 2222 != 1 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1482: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1451: $PREPARSER ./calc input +error: null divisor +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (!!) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input +stderr: +stderr: +stderr: +syntax error +syntax error +error: 2222 != 1 stderr: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected invalid token Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 +Stack now 0 4 12 20 29 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 +Stack now 0 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (2) $3 = token ')' () -> $$ = nterm exp (2) Entering state 8 +Stack now 0 8 Reading a token Next token is token '/' () Shifting token '/' () Entering state 22 +Stack now 0 8 22 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 +Stack now 0 8 22 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 +Stack now 0 8 22 4 12 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (1) -> $$ = nterm exp (0) Entering state 12 +Stack now 0 8 22 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (0) $3 = token ')' () -> $$ = nterm exp (0) Entering state 31 +Stack now 0 8 22 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): +Reducing stack by rule 10 (line 93): $1 = nterm exp (2) $2 = token '/' () $3 = nterm exp (0) error: null divisor -> $$ = nterm exp (2) Entering state 8 +Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' 
() -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of input () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () Cleanup: popping nterm input () -input: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: $PREPARSER ./calc /dev/null - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1480: $PREPARSER ./calc input +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +1.11: syntax error +1.1-16: error: 2222 != 1 stderr: stderr: +./calc.at:1455: cat stderr +input: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Starting parse -Entering state 0 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 
24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token ')' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = 
nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 Reading a token -Next token is token '(' () -Shifting 
token '(' () +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp 
(7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) 
+Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token -Next token is token ')' () +Next token is token ')' (10.11: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1477: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -stderr: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. -./calc.at:1477: $PREPARSER ./calc input - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -./calc.at:1492: cat stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -620. existing.at:808: testing GNU Cim Grammar: IELR(1) ... 
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -input: -stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1480: cat stderr -563. calc.at:1489: ./calc.at:1478: $PREPARSER ./calc input - ok -input: -stderr: - | 1 2 -./calc.at:1477: $PREPARSER ./calc input -Starting parse -Entering state 0 +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is 
token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 16 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1482: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '*' (1.39: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -196925,4889 +187631,2992 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error - | (!!) + (1 2) = 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 Reading a token -Next token is token '+' (1.17: ) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering 
state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (5.11-6.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 Reducing 
stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -1.3: syntax error -input: -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error, unexpected number -error: 2222 != 1 -1.3: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1492: cat stderr -1.3: syntax error -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stdout: - | (!!) + (1 2) = 1 -%code top { /* -*- c -*- */ -/* Adjust to the compiler. - We used to do it here, but each time we add a new line, - we have to adjust all the line numbers in error messages. - It's simpler to use a constant include to a varying file. 
*/ -#include -} - -%define parse.error verbose -%debug -%{ -#include -#include -#include -#define MAX 200 -static int yylex (void); -#include - -/* !POSIX */ static void yyerror (const char *msg); -%} -%union -{ - int val; -}; - -%token END "end" -%type exp input -%token t1 1 "1" -%token t2 2 "2" -%token t3 3 "3" -%token t4 4 "4" -%token t5 5 "5" -%token t6 6 "6" -%token t7 7 "7" -%token t8 8 "8" -%token t9 9 "9" -%token t10 10 "10" -%token t11 11 "11" -%token t12 12 "12" -%token t13 13 "13" -%token t14 14 "14" -%token t15 15 "15" -%token t16 16 "16" -%token t17 17 "17" -%token t18 18 "18" -%token t19 19 "19" -%token t20 20 "20" -%token t21 21 "21" -%token t22 22 "22" -%token t23 23 "23" -%token t24 24 "24" -%token t25 25 "25" -%token t26 26 "26" -%token t27 27 "27" -%token t28 28 "28" -%token t29 29 "29" -%token t30 30 "30" -%token t31 31 "31" -%token t32 32 "32" -%token t33 33 "33" -%token t34 34 "34" -%token t35 35 "35" -%token t36 36 "36" -%token t37 37 "37" -%token t38 38 "38" -%token t39 39 "39" -%token t40 40 "40" -%token t41 41 "41" -%token t42 42 "42" -%token t43 43 "43" -%token t44 44 "44" -%token t45 45 "45" -%token t46 46 "46" -%token t47 47 "47" -%token t48 48 "48" -%token t49 49 "49" -%token t50 50 "50" -%token t51 51 "51" -%token t52 52 "52" -%token t53 53 "53" -%token t54 54 "54" -%token t55 55 "55" -%token t56 56 "56" -%token t57 57 "57" -%token t58 58 "58" -%token t59 59 "59" -%token t60 60 "60" -%token t61 61 "61" -%token t62 62 "62" -%token t63 63 "63" -%token t64 64 "64" -%token t65 65 "65" -%token t66 66 "66" -%token t67 67 "67" -%token t68 68 "68" -%token t69 69 "69" -%token t70 70 "70" -%token t71 71 "71" -%token t72 72 "72" -%token t73 73 "73" -%token t74 74 "74" -%token t75 75 "75" -%token t76 76 "76" -%token t77 77 "77" -%token t78 78 "78" -%token t79 79 "79" -%token t80 80 "80" -%token t81 81 "81" -%token t82 82 "82" -%token t83 83 "83" -%token t84 84 "84" -%token t85 85 "85" -%token t86 86 "86" -%token t87 87 "87" -%token t88 88 "88" -%token t89 89 "89" -%token t90 90 "90" -%token t91 91 "91" -%token t92 92 "92" -%token t93 93 "93" -%token t94 94 "94" -%token t95 95 "95" -%token t96 96 "96" -%token t97 97 "97" -%token t98 98 "98" -%token t99 99 "99" -%token t100 100 "100" -%token t101 101 "101" -%token t102 102 "102" -%token t103 103 "103" -%token t104 104 "104" -%token t105 105 "105" -%token t106 106 "106" -%token t107 107 "107" -%token t108 108 "108" -%token t109 109 "109" -%token t110 110 "110" -%token t111 111 "111" -%token t112 112 "112" -%token t113 113 "113" -%token t114 114 "114" -%token t115 115 "115" -%token t116 116 "116" -%token t117 117 "117" -%token t118 118 "118" -%token t119 119 "119" -%token t120 120 "120" -%token t121 121 "121" -%token t122 122 "122" -%token t123 123 "123" -%token t124 124 "124" -%token t125 125 "125" -%token t126 126 "126" -%token t127 127 "127" -%token t128 128 "128" -%token t129 129 "129" -%token t130 130 "130" -%token t131 131 "131" -%token t132 132 "132" -%token t133 133 "133" -%token t134 134 "134" -%token t135 135 "135" -%token t136 136 "136" -%token t137 137 "137" -%token t138 138 "138" -%token t139 139 "139" -%token t140 140 "140" -%token t141 141 "141" -%token t142 142 "142" -%token t143 143 "143" -%token t144 144 "144" -%token t145 145 "145" -%token t146 146 "146" -%token t147 147 "147" -%token t148 148 "148" -%token t149 149 "149" -%token t150 150 "150" -%token t151 151 "151" -%token t152 152 "152" -%token t153 153 "153" -%token t154 154 "154" -%token t155 155 "155" -%token t156 156 "156" -%token t157 157 "157" 
-%token t158 158 "158" -%token t159 159 "159" -%token t160 160 "160" -%token t161 161 "161" -%token t162 162 "162" -%token t163 163 "163" -%token t164 164 "164" -%token t165 165 "165" -%token t166 166 "166" -%token t167 167 "167" -%token t168 168 "168" -%token t169 169 "169" -%token t170 170 "170" -%token t171 171 "171" -%token t172 172 "172" -%token t173 173 "173" -%token t174 174 "174" -%token t175 175 "175" -%token t176 176 "176" -%token t177 177 "177" -%token t178 178 "178" -%token t179 179 "179" -%token t180 180 "180" -%token t181 181 "181" -%token t182 182 "182" -%token t183 183 "183" -%token t184 184 "184" -%token t185 185 "185" -%token t186 186 "186" -%token t187 187 "187" -%token t188 188 "188" -%token t189 189 "189" -%token t190 190 "190" -%token t191 191 "191" -%token t192 192 "192" -%token t193 193 "193" -%token t194 194 "194" -%token t195 195 "195" -%token t196 196 "196" -%token t197 197 "197" -%token t198 198 "198" -%token t199 199 "199" -%token t200 200 "200" -%% -input: - exp { assert ($1 == 0); $$ = $1; } -| input exp { assert ($2 == $1 + 1); $$ = $2; } -; - -exp: - END - { $$ = 0; } -| "1" END - { $$ = 1; } -| "1" "2" END - { $$ = 2; } -| "1" "2" "3" END - { $$ = 3; } -| "1" "2" "3" "4" END - { $$ = 4; } -| "1" "2" "3" "4" "5" END - { $$ = 5; } -| "1" "2" "3" "4" "5" "6" END - { $$ = 6; } -| "1" "2" "3" "4" "5" "6" "7" END - { $$ = 7; } -| "1" "2" "3" "4" "5" "6" "7" "8" END - { $$ = 8; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" END - { $$ = 9; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" END - { $$ = 10; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" END - { $$ = 11; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" END - { $$ = 12; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" END - { $$ = 13; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" END - { $$ = 14; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" END - { $$ = 15; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - END - { $$ = 16; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" END - { $$ = 17; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" END - { $$ = 18; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" END - { $$ = 19; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" END - { $$ = 20; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" END - { $$ = 21; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" END - { $$ = 22; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" END - { $$ = 23; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" END - { $$ = 24; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" END - { $$ = 25; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" END - { $$ = 26; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" END - { $$ = 27; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" END - { 
$$ = 28; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" END - { $$ = 29; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - END - { $$ = 30; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" END - { $$ = 31; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" END - { $$ = 32; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" END - { $$ = 33; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" END - { $$ = 34; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" END - { $$ = 35; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" END - { $$ = 36; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" END - { $$ = 37; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" END - { $$ = 38; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" END - { $$ = 39; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" END - { $$ = 40; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" END - { $$ = 41; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" END - { $$ = 42; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" END - { $$ = 43; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - END - { $$ = 44; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" END - { $$ = 45; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" 
"23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" END - { $$ = 46; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" END - { $$ = 47; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" END - { $$ = 48; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" END - { $$ = 49; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" END - { $$ = 50; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" END - { $$ = 51; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" END - { $$ = 52; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" END - { $$ = 53; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" END - { $$ = 54; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" END - { $$ = 55; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" END - { $$ = 56; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" END - { $$ = 57; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - END - { $$ = 58; } -| "1" "2" "3" "4" "5" "6" "7" 
"8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" END - { $$ = 59; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" END - { $$ = 60; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" END - { $$ = 61; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" END - { $$ = 62; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" END - { $$ = 63; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" END - { $$ = 64; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" END - { $$ = 65; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" END - { $$ = 66; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" END - { $$ = 67; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" END - { $$ = 68; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" 
"41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" END - { $$ = 69; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" END - { $$ = 70; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" END - { $$ = 71; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - END - { $$ = 72; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" END - { $$ = 73; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" END - { $$ = 74; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" END - { $$ = 75; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" END - { $$ = 76; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" END - { $$ = 77; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" 
"54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" END - { $$ = 78; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" END - { $$ = 79; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" END - { $$ = 80; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" END - { $$ = 81; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" END - { $$ = 82; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" END - { $$ = 83; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" END - { $$ = 84; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" END - { $$ = 85; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" 
"75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - END - { $$ = 86; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" END - { $$ = 87; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" END - { $$ = 88; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" END - { $$ = 89; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" END - { $$ = 90; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" END - { $$ = 91; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" END - { $$ = 92; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" END - { $$ = 93; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - 
"31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" END - { $$ = 94; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" END - { $$ = 95; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" END - { $$ = 96; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" END - { $$ = 97; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" END - { $$ = 98; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" END - { $$ = 99; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - END - { $$ = 100; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" 
"27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" END - { $$ = 101; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" END - { $$ = 102; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" END - { $$ = 103; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" END - { $$ = 104; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" END - { $$ = 105; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" END - { $$ = 106; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" 
"76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" END - { $$ = 107; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" END - { $$ = 108; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" END - { $$ = 109; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" END - { $$ = 110; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" END - { $$ = 111; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - END - { $$ = 112; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" 
"83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" END - { $$ = 113; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" END - { $$ = 114; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" END - { $$ = 115; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" END - { $$ = 116; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" END - { $$ = 117; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" END - { $$ = 118; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" 
"46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" END - { $$ = 119; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" END - { $$ = 120; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" END - { $$ = 121; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" END - { $$ = 122; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" END - { $$ = 123; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" 
"98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - END - { $$ = 124; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" END - { $$ = 125; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" END - { $$ = 126; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" END - { $$ = 127; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" END - { $$ = 128; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" 
"116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" END - { $$ = 129; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" END - { $$ = 130; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" END - { $$ = 131; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" END - { $$ = 132; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" END - { $$ = 133; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" 
"109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" END - { $$ = 134; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" END - { $$ = 135; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - END - { $$ = 136; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" END - { $$ = 137; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" END - { $$ = 138; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" 
"72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" END - { $$ = 139; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" END - { $$ = 140; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" END - { $$ = 141; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" END - { $$ = 142; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" END - { $$ = 143; } -| "1" 
"2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" END - { $$ = 144; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" END - { $$ = 145; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" END - { $$ = 146; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" END - { $$ = 147; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" 
"66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - END - { $$ = 148; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" END - { $$ = 149; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" END - { $$ = 150; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" END - { $$ = 151; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" 
"107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" END - { $$ = 152; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" END - { $$ = 153; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" END - { $$ = 154; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" END - { $$ = 155; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" 
"126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" END - { $$ = 156; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" END - { $$ = 157; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" END - { $$ = 158; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" END - { $$ = 159; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" 
"130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - END - { $$ = 160; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" END - { $$ = 161; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" END - { $$ = 162; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" END - { $$ = 163; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" 
"117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" END - { $$ = 164; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" END - { $$ = 165; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" END - { $$ = 166; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" END - { $$ = 167; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" 
- "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" END - { $$ = 168; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" END - { $$ = 169; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" END - { $$ = 170; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" END - { $$ = 171; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" 
"34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - END - { $$ = 172; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" END - { $$ = 173; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" END - { $$ = 174; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" 
"145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" END - { $$ = 175; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" END - { $$ = 176; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" END - { $$ = 177; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" END - { $$ = 178; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" 
"67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" END - { $$ = 179; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" END - { $$ = 180; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" END - { $$ = 181; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" 
"152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" END - { $$ = 182; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" END - { $$ = 183; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - END - { $$ = 184; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" END - { $$ = 185; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" 
"42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" END - { $$ = 186; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" END - { $$ = 187; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" END - { $$ = 188; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" 
"110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" END - { $$ = 189; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" END - { $$ = 190; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" END - { $$ = 191; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" 
"159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" END - { $$ = 192; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" END - { $$ = 193; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" END - { $$ = 194; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" END - { $$ = 195; } -| 
"1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - END - { $$ = 196; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" END - { $$ = 197; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" END - { $$ = 198; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" 
"36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" END - { $$ = 199; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" "200" END - { $$ = 200; } -; -%% - - - - -/* A C error reporting function. */ -/* !POSIX */ static -void yyerror (const char *msg) -{ - fprintf (stderr, "%s\n", msg); -} -static int -yylex (void) -{ - static int inner = 1; - static int outer = 0; - if (outer > MAX) - return 0; - else if (inner > outer) - { - inner = 1; - ++outer; - return END; - } - return inner++; -} -#include /* getenv. */ -#include /* strcmp. */ -int -main (int argc, char const* argv[]) -{ - (void) argc; - (void) argv; - return yyparse (); -} -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1480: cat stderr -./calc.at:1477: cat stderr -./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' 
(1.2: ) -Entering state 5 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 Reading a token -Next 
token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing 
stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1477: $PREPARSER ./calc input -621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ... - | (- *) + (1 2) = 1 -./calc.at:1478: cat stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -1.3: syntax error -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) 
+Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (10.16-11.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -./calc.at:1478: $PREPARSER ./calc input -stderr: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error -stderr: -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: cat stderr -stderr: -1.3: syntax error -input: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -input: -./calc.at:1477: cat stderr - | (* *) + (*) + (*) -./calc.at:1480: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting 
token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1478: cat stderr -stderr: -stderr: -stdout: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./types.at:139: $PREPARSER ./test +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) + | 1//2 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ... +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -input: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | error -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1478: $PREPARSER ./calc input -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1477: $PREPARSER ./calc input -stderr: -======== Testing with C++ standard flags: '' -stderr: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: stderr: -1.1: syntax error +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ... +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +619. existing.at:808: testing GNU Cim Grammar: LALR(1) ... +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -1.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - stderr: +error: null divisor stderr: -1.1: syntax error -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -1.1: syntax error -./calc.at:1480: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +stderr: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +620. existing.at:808: testing GNU Cim Grammar: IELR(1) ... 
Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp 
(1.1-17: 2222) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1492: cat stderr stderr: -./calc.at:1478: cat stderr -./calc.at:1477: cat stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) --> $$ = nterm exp (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 
by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) --> $$ = nterm exp (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) 
Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '=' (4.6: ) 
+Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a 
token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token 
"number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) 
Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) --> $$ = nterm exp (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) +Next token is token 
'\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) Entering state 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) Entering state 28 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) 
+-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 
(line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) --> $$ = nterm exp (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): - $1 = 
nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) --> $$ = nterm exp (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1480: $PREPARSER ./calc input -input: - | 1 = 2 = 3 -./calc.at:1478: $PREPARSER ./calc input - | (* *) + (*) + (*) -stderr: -input: +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1485: cat stderr +540. calc.at:1455: ok stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Next token is token ')' () +Reducing stack by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Stack now 0 8 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 
+Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Stack now 0 8 22 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 +Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (7) +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | error +./calc.at:1489: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 +Stack now 0 8 20 
29 21 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): 
- $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) --> $$ = nterm exp (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input 
(1.1-4.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> 
$$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1) - $2 = token 
'\n'./calc.at:1492: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1477: $PREPARSER ./calc input - () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token 
-Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) 
--> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) --> $$ = nterm exp (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) Entering state 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering 
state 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) Entering state 28 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 
(line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) --> $$ = nterm exp (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: 
) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (13.11-12: 64) +Shifting token "number" 
(13.11-12: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) --> $$ = nterm exp (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -stderr: -1.7: syntax error -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (- *) + (1 2) = 1 +./calc.at:1480: $PREPARSER ./calc input +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: + | 1 2 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + input: -stderr: -1.7: syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 2 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1482: $PREPARSER ./calc input + | 1//2 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +./calc.at:1479: cat stderr +./calc.at:1486: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ... Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 | 1 + 2 * 3 + !- ++ -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1485: $PREPARSER ./calc input -stdout: -stderr: +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input stderr: -./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] stderr: stderr: +./calc.at:1448: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 stderr: -1.7: syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Reading a token @@ -201819,199 +190628,268 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -1.7: syntax error -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: discarding lookahead token '/' () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) +Next token is token ')' () +syntax error +Shifting token error () Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (1.5: ) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +Next token is token '*' () +syntax error +Shifting token error () Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.11: ) +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 
-Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: $PREPARSER ./calc /dev/null stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !- ++ +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -202021,72 +190899,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -stderr: -./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -./types.at:139: ./check -stderr: -memory exhausted -memory exhausted -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1480: cat stderr -./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -input: -./calc.at:1477: cat stderr -stderr: -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] 
-input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -input: - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1492: $PREPARSER ./calc input - | - | +1 -./calc.at:1478: $PREPARSER ./calc input -memory exhausted -memory exhausted -input: - | (#) + (#) = 2222 -input: -./calc.at:1480: $PREPARSER ./calc input -input: - | 1//2 stderr: stderr: -./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - | - | +1 -2.1: syntax error +./calc.at:1443: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -202098,72 +190913,21 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1451: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +Starting parse +Entering state 0 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1477: $PREPARSER ./calc input -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () stderr: -2.1: syntax error stderr: Starting parse Entering state 0 @@ -202185,118 +190949,131 @@ Error: popping token '/' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '/' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 29 +Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '!' (1.13: ) Shifting token '!' 
(1.13: ) Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1468: cat stderr +./calc.at:1469: cat stderr +input: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +input: +./calc.at:1479: cat stderr +input: + | (# + 1) = 1111 + | error +./calc.at:1479: $PREPARSER ./calc input +stderr: + | + | +1 +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) stderr: -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -2.1: syntax error -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1492: $PREPARSER ./calc input -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1477: cat stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -202306,248 +191083,583 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stdout: +./calc.at:1486: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + + | (#) + (#) = 2222 +stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 29 +Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 +Stack now 0 8 20 5 Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) +Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:510: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1480: cat stderr -./calc.at:1485: cat stderr stderr: -./calc.at:1478: $PREPARSER ./calc /dev/null -./calc.at:1477: cat stderr +stdout: +./calc.at:1451: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1489: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +syntax error, unexpected invalid token Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 29 +Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 +Stack now 0 8 20 5 Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) +Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1477: $PREPARSER ./calc /dev/null -1.1: syntax error -input: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -1.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -stderr: - | error -stdout: -stderr: -./calc.at:1485: $PREPARSER ./calc input -1.1: syntax error -./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./calc.at:1476: cat stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr stderr: stderr: -stdout: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +544. 
calc.at:1468: input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] + ok Starting parse Entering state 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -stderr: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./calc.at:1492: cat stderr -./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -syntax error: invalid character: '#' -stdout: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1494: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. stderr: - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -input: Starting parse Entering state 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1477: "$PERL" -pi -e 'use strict; +Now at end of input. 
+syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1489: $PREPARSER ./calc input +input: +input: +input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -202557,138 +191669,116 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -stderr: - | (#) + (#) = 2222 -syntax error: invalid character: '#' +input: +./calc.at:1476: cat stderr + | 1 = 2 = 3 + | (* *) + (*) + (*) + | 1 + 2 * 3 + !+ ++ +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1469: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1445: cat stderr +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: $PREPARSER ./calc input -./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -./calc.at:1478: cat stderr stderr: stderr: -input: -======== Testing with C++ standard flags: '' +syntax error, unexpected invalid token +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) 
-Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -memory exhausted -memory exhausted -./calc.at:1477: cat stderr -./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1494: $PREPARSER ./calc input -input: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -202698,14 +191788,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -memory exhausted -memory exhausted +stdout: +./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +stderr: +stdout: input: -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -202715,1850 +191805,1779 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +input: +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 + | (- *) + (1 2) = 1 + | (* *) + (*) + (*) ./calc.at:1477: $PREPARSER ./calc input +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: $PREPARSER ./calc input stderr: -./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1480: cat stderr stderr: stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1485: cat stderr +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +syntax error, unexpected '=' +syntax error +syntax error +syntax error +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +533. calc.at:1445: 546. calc.at:1476: ok + ok +input: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +./calc.at:1480: cat stderr +./calc.at:1492: cat stderr +./calc.at:1446: cat stderr +stderr: +stderr: +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +syntax error +syntax error +syntax error +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () 
+ $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 10 Reading a token -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) Entering state 30 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = 
nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) + $1 = token number (5) +-> $$ = nterm exp (5) Entering state 10 Reading a token -Next token is token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 -Next token is token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number 
(2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Reading a token -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 10 -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Reading a token -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Reading a token -Next token is token ')' (5.4: ) +Next token is token ')' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) 
+Next token is token ')' () +Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Reading a token -Next token is token '=' (5.8: ) +Next token is token '=' () Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 + $1 = nterm exp (1) + $2 = token '\n' () +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Next token is 
token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 -Next token is token '\n' (7.10-8.0: ) +Reading a token +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = 
token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) + $1 = token number (5) +-> $$ = nterm exp (5) Entering state 10 Reading a token -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm 
exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) 
-Entering state 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack 0 by rule 12 
(line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing 
stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stdout: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -input: -input: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 = 2 = 3 - | (# + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input -stderr: -./calc.at:1480: $PREPARSER ./calc input -stderr: -./types.at:139: $PREPARSER ./test -./calc.at:1482: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -Starting parse -Entering state 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token -Next token is token '\n' 
(1.14-2.0: ) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering 
state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 -Next token is token '\n' (2.16-3.0: ) +Reading a token +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) + $1 = token number (3) 
+-> $$ = nterm exp (3) Entering state 32 Reading a token -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (256) +Shifting token number (256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 -Next token is token '\n' (4.10-5.0: ) +Reading a token +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp 
(2) +-> $$ = nterm exp (4) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Reading a token -Next token is token '=' (5.8: ) +Next token is token '=' () Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (64) +Shifting token number (64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Reading a token -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) Entering state 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Reading a token -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (-1) + 
$2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 28 Reading a token -Next token is token '-' (9.7: ) +Next token is token '-' () Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Reading a token -Next token is token '=' (9.11: ) +Next token is token '=' () Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (4) +Shifting token number (4) Entering state 1 
Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) + $1 = token number (4) +-> $$ = nterm exp (4) Entering state 10 Reading a token -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Reading a token -Next token is token ')' (10.11: ) +Next token is token ')' () Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 28 Reading a token 
-Next token is token '=' (10.13: ) +Next token is token '=' () Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 
Reading a token -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) Entering state 32 -Next token is token '=' (12.7: ) +./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '=' () Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Reading a token -Next token is token ')' (13.5: ) +Next token is token ')' () Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' () 
+Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Reading a token -Next token is token '=' (13.9: ) +Next token is token '=' () Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (14.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./calc.at:1492: cat stderr -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -204568,55 +193587,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 2 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +stdout: +stderr: +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -204626,170 +193602,54 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1487: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ... +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y + + +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ... +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +625. 
regression.at:25: testing Trivial grammars ... +./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -======== Testing with C++ standard flags: '' -stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:44: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./calc.at:1485: cat stderr +./calc.at:1482: cat stderr +./calc.at:1489: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1479: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) stderr: -./calc.at:1478: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./torture.at:548: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - | (1 + #) = 1111 -./calc.at:1492: $PREPARSER ./calc input -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./calc.at:1477: cat stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: stderr: stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp 
(1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) - | (!!) 
+ (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -204797,11 +193657,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -204809,11 +193669,11 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token @@ -204835,11 +193695,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token @@ -204863,11 +193723,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -204875,11 +193735,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -204891,11 +193751,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token @@ -204927,11 +193787,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token @@ -204979,11 +193839,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -204991,11 +193851,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing 
stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -205020,11 +193880,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -205063,11 +193923,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -205091,11 +193951,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -205110,11 +193970,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -205164,11 +194024,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -205198,11 +194058,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -205246,11 +194106,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -205258,11 +194118,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token @@ -205277,11 +194137,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -205300,11 +194160,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) 
-Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token @@ -205335,11 +194195,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -205351,11 +194211,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -205363,11 +194223,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -205399,11 +194259,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -205441,11 +194301,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -205453,11 +194313,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -205465,11 +194325,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -205491,11 +194351,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token @@ -205524,11 +194384,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading 
a token @@ -205536,11 +194396,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -205565,11 +194425,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -205584,11 +194444,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token @@ -205614,37 +194474,20 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (!!) + (1 2) = 1 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1477: $PREPARSER ./calc input +./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -205652,11 +194495,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -205664,11 +194507,11 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token @@ -205690,11 +194533,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token @@ -205718,11 +194561,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -205730,11 +194573,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -205746,11 +194589,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token @@ -205782,11 +194625,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token @@ -205834,11 +194677,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 
79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -205846,11 +194689,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -205875,11 +194718,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -205918,11 +194761,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -205946,11 +194789,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -205965,11 +194808,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -206019,11 +194862,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -206053,11 +194896,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -206101,11 +194944,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -206113,11 +194956,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token @@ -206132,11 +194975,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" 
(3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -206155,11 +194998,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token @@ -206190,11 +195033,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -206206,11 +195049,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -206218,11 +195061,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -206254,11 +195097,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -206296,11 +195139,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -206308,11 +195151,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -206320,11 +195163,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -206346,11 +195189,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token @@ -206379,11 
+195222,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -206391,11 +195234,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -206420,11 +195263,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -206439,11 +195282,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token @@ -206469,92 +195312,29 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 
1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; +syntax error, unexpected '=' +./calc.at:1443: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1487: $PREPARSER ./calc input +input: +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -206564,9 +195344,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -206576,7 +195354,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -206587,28 +195375,38 @@ }eg ' expout || exit 77 input: -1.11: syntax error -1.1-16: error: 2222 != 1 +input: +./calc.at:1491: cat stderr +./torture.at:237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + | 1//2 stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -stdout: -./calc.at:1482: $PREPARSER ./calc input + | (* *) + (*) + (*) + | (#) + (#) = 2222 +./calc.at:1492: $PREPARSER ./calc input stderr: -./calc.at:1494: cat stderr -./types.at:139: ./check -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1480: cat stderr -./calc.at:1492: cat stderr +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input stderr: -stdout: -1.11: syntax error -1.1-16: error: 2222 != 1 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: +./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; +stderr: +stderr: +syntax error +syntax error +syntax error +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +input: +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -206618,37 +195416,59 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1487: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - - | - | +1 -./calc.at:1485: $PREPARSER ./calc input +input: +input: +stderr: +stderr: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) + | error + | error + | 1 = 2 = 3 +./calc.at:1489: $PREPARSER ./calc input + | 1 2 + | 1 2 stderr: +./calc.at:1489: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1485: cat stderr +./calc.at:1479: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 ./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -206659,65 +195479,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc input +./torture.at:140: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -input: -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty 
[-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] stderr: -input: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -input: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () Starting parse Entering state 0 Reading a token @@ -206729,231 +195508,300 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token Next token is token "number" (2) -syntax error +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) - | (# + 1) = 1111 - | (1 + # + 1) = 1111 -./calc.at:1486: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - - | 1//2 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input +Cleanup: discarding lookahead token '=' () stderr: -./calc.at:1478: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1477: cat stderr -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -stderr: -input: +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token 
'=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, 
use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./calc.at:1487: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token 1.2: syntax error: invalid character: '#' Next token is token error (1.2: ) -Shifting token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token ')' (1.7: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token 
"number" (1.11-14: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -./calc.at:1486: $PREPARSER ./calc input -input: -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -syntax error: invalid character: '#' -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input -input: -stderr: stderr: -stderr: - | (- *) + (1 2) = 1 Starting parse Entering state 0 Reading a token @@ -207790,78 +196638,258 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1477: $PREPARSER ./calc input -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error ./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +stderr: +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +stderr: +input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] +input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] +input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] +input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] +input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] +input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] +input.y:177.1-5: error: useless associativity for AND, use 
%precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] +input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] 
+input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] +input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] +input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] +input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] +input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +input: +input: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 +./calc.at:1448: cat stderr +input: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: cat stderr +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 +stderr: +stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1//2 + | 1 = 2 = 3 +memory exhausted +memory exhausted +./calc.at:1485: $PREPARSER ./calc input +stderr: +./calc.at:1479: $PREPARSER ./calc input +stderr: +./calc.at:1491: $PREPARSER ./calc input +stderr: +./calc.at:1479: cat stderr +memory exhausted +memory exhausted +./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () Starting parse Entering state 0 Reading a token @@ -207882,14 +196910,6 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stderr: -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -208238,7 +197258,13 @@ Entering state 24 Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1) - $2 = token '\n' () + $2 = token '\n'Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () + () -> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 83): @@ -208726,9 +197752,6 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 Starting parse Entering state 0 Reading a token @@ -208740,68 +197763,1334 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr +stdout: +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 +./calc.at:1477: cat stderr +./calc.at:1482: cat stderr +stderr: +./calc.at:1486: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +stderr: +stderr: +stderr: +memory exhausted +memory exhausted +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +syntax error, unexpected '=' +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 29 +Entering state 30 Reading a token Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +input: +./regression.at:45: $CC $CFLAGS $CPPFLAGS -c -o input.o -DYYDEBUG -c input.c +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error + | (#) + (#) = 2222 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +stderr: +stderr: +stderr: +stderr: +./calc.at:1448: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: 
) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +memory exhausted +memory exhausted +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 
+Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1478: cat stderr +input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +input: +./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + | (!!) + (1 2) = 1 + | (* *) + (*) + (*) + | (1 + #) = 1111 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1469: cat stderr +./calc.at:1477: $PREPARSER ./calc input +stderr: +./calc.at:1482: $PREPARSER ./calc input +stderr: + | 1 2 +./calc.at:1487: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '=' +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) -> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +1.6: syntax error: invalid character: '#' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr +stderr: +stderr: +input: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +stderr: +1.6: syntax error: invalid character: '#' + | + | +1 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1480: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 +Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +1.6: syntax error: invalid character: '#' +input: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +stderr: + | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 + !+ ++ + | error +syntax error, unexpected '+' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1492: $PREPARSER ./calc input +./torture.at:510: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1480: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./torture.at:548: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +stderr: +stderr: +stderr: +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp 
(2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -208810,6 +199099,90 @@ -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +syntax error, unexpected '+' +stderr: +stdout: +./calc.at:1494: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stderr: +stdout: +./torture.at:238: $PREPARSER ./input +input: +input: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | 1 + 2 * 3 + !+ ++ + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1443: cat stderr +./calc.at:1491: cat stderr +./calc.at:1485: cat stderr +./calc.at:1489: cat stderr +./calc.at:1489: $PREPARSER ./calc input +stderr: +./calc.at:1485: cat stderr +./calc.at:1446: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 @@ -208818,240 +199191,1441 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +input: +input: +input: +input: +input: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +./calc.at:1482: cat stderr + | 1 = 2 = 3 + | + | +1 + | 1 + 2 * 3 + !- ++ + | error + | (1 + #) = 1111 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1486: cat stderr +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (!!) + (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input + | (1 + #) = 1111 +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1494: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1479: cat stderr +stderr: +./calc.at:1479: cat stderr +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 + | 1 + 2 * 3 + !- ++ +./calc.at:1487: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +stderr: +./calc.at:1446: $PREPARSER ./calc input +stderr: +stderr: +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 +stderr: +./calc.at:1476: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +605. 
torture.at:216: ok +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] 
+input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] +input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] +input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] +input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] +input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] +input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] +input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless 
associativity for DOT_N, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] +input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for FILL, use 
%precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] +input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] +input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] +input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] +input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +input: +input: +input: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is 
token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' 
(2.16-3.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) 
+Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token -Next token is token ')' () +Next token is token ')' (5.4: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (5.8: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token 
'=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token 
'\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: 
) +Shifting token '-' (10.8: ) +Entering state 19 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = 
token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line 
(13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 16 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1448: cat stderr + | 1//2 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | 1 = 2 = 3 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ + | 1//2 +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +./calc.at:1486: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () @@ -209060,513 +200634,1707 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) --> $$ = 
nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +input: +stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./calc.at:1480: cat stderr +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (1 + #) = 1111 +./calc.at:1451: cat stderr +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering 
state 27 +Reading a token +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1479: $PREPARSER ./calc input +stderr: +syntax error, unexpected '+' +stderr: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1448: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +stderr: +stderr: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting 
token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm 
exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token -Next token is token '=' () +Next token is token ')' (5.4: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (stderr: +5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> 
$$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 -Next token is token '=' () +Reading a token +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' 
(7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 
+Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')stderr: +' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 
3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 
6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 16 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc /dev/null +./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +input: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 8 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering 
state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 +Next token is token '/' () +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1477: cat stderr +./calc.at:1482: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1486: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +stderr: +626. regression.at:55: testing YYSTYPE typedef ... +./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stderr: +syntax error, unexpected '+' +stderr: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Stack now 0 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '=' (1.9: ) +Shifting token '=' 
(1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +input: +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) + $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 12 +Entering state 27 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1478: cat stderr + | (# + 1) = 1111 +./calc.at:1451: $PREPARSER ./calc input + | 1 2 +./calc.at:1494: $PREPARSER ./calc input +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +stderr: +stderr: +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 7 
(line 90): $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) + $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 30 Reading a 
token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () $3 = nterm exp (2) --> $$ = nterm exp (4) +-> $$ = nterm exp (2) Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; +syntax error, unexpected end of input +input: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -209576,7 +202344,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -209586,8 +202355,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !+ ++ +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -209597,20 +202367,50 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1492: cat stderr stderr: -input: - | 1 2 -./calc.at:1487: $PREPARSER ./calc input stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 stderr: stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1485: cat stderr +1.2: syntax error: invalid character: '#' +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) Starting parse Entering state 0 Reading a token @@ -209622,283 +202422,498 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '/' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) +Cleanup: discarding lookahead token '/' () +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +./calc.at:1485: cat stderr +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Next token is token '=' () +Reading a token +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = 
nterm exp (-3) + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 30 -Next token is token '=' () +Reading a token +Next token is token '*' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) + $1 = nterm exp (1) $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 +Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (3333) $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1443: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +syntax error, unexpected end of input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 
102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1491: cat stderr +stderr: +stderr: +1.11-17: error: null divisor +input: +input: +./calc.at:1480: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +./calc.at:1479: cat stderr +input: +./calc.at:1469: cat stderr + | error +./calc.at:1485: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: cat stderr + | 1 + 2 * 3 + !+ ++ +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) + | 1 = 2 = 3 +./calc.at:1492: $PREPARSER ./calc input +input: +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none + | (- *) + (1 2) = 1 +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -209907,614 +202922,823 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) + $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) --> $$ = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 
= nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: +input: +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc /dev/null +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | (# + 1) = 1111 +stderr: + | (#) + (#) = 2222 +input: +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = 
nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) Entering state 27 +Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1480: $PREPARSER ./calc input +stderr: +./calc.at:1443: $PREPARSER ./calc input +stderr: +./calc.at:1479: $PREPARSER ./calc /dev/null +input: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +Starting parse +Entering state 0 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1487: cat stderr +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +input: + | + | +1 +./calc.at:1446: $PREPARSER ./calc input + | 1 = 2 = 3 +input: +./regression.at:74: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +stderr: +./calc.at:1479: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1482: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack 
now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | error + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc input +stderr: +./calc.at:1494: cat stderr +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1486: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error, unexpected end of input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +stderr: +./calc.at:1448: cat stderr +stderr: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +Starting parse +Entering state 0 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +stderr: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | error +./calc.at:1487: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: + | 1 + 2 * 3 + !- ++ +Starting parse +Entering state 0 Reading a token -Next token is token number (3) -Shifting token number (3) +Now at end of input. 
+syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: $PREPARSER ./calc input + | (# + 1) = 1111 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1480: cat stderr -./calc.at:1492: cat stderr -./calc.at:1485: $PREPARSER ./calc /dev/null -stdout: -input: -./types.at:139: ./check -./calc.at:1482: cat stderr stderr: - | 1 2 -input: -./calc.at:1486: $PREPARSER ./calc input -input: - | (1 + # + 1) = 1111 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1494: cat stderr - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line 
(1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; +stderr: +stderr: +stderr: +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +./calc.at:1486: cat stderr +stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token invalid token () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -210524,301 +203748,355 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + 1) / (1 - 1) -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' stderr: -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -stderr: - | error | 1//2 -./calc.at:1482: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () stderr: +syntax error, unexpected end of input ./calc.at:1494: $PREPARSER ./calc input +./calc.at:1480: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +535. calc.at:1448: | + | +1 Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ 
= nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -error: null divisor -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok stderr: +./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1487: cat stderr -stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: cat stderr +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1477: cat stderr +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token 
'=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 
6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: +input: stderr: -error: null divisor Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +stderr: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) Entering state 22 Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: -input: - | 1//2 -./calc.at:1487: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1478: $PREPARSER ./calc input -input: -./calc.at:1486: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token invalid token () +./calc.at:1451: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (!!) + (1 2) = 1 +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -210838,83 +204116,113 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1477: $PREPARSER ./calc input -stderr: -stderr: Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input +stderr: +./calc.at:1477: cat stderr +./calc.at:1486: $PREPARSER ./calc input +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +syntax error: invalid character: '#' + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr + +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +627. regression.at:85: testing Early token definitions with --yacc ... 
+./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc -o input.c input.y Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '/' () -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: cat stderr stderr: stderr: stderr: +input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) Entering state 22 Reading a token -Next token is token '/' () -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -1.2: syntax error -1.10: syntax error -1.16: syntax error -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -210925,17 +204233,21 @@ }eg ' expout || exit 77 ./calc.at:1485: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1489: cat stderr +./calc.at:1443: cat stderr +./calc.at:1489: cat stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -210945,9 +204257,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -input: -./calc.at:1478: "$PERL" -pi -e 'use strict; + | + | +1 +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -210957,49 +204278,207 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | 1//2 -./calc.at:1486: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1480: cat stderr -./calc.at:1494: cat stderr -stderr: -input: -input: +./calc.at:1487: cat stderr Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1482: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1487: cat stderr -555. calc.at:1480: ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +./regression.at:116: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +stderr: +input: + | (1 + # + 1) = 1111 +./calc.at:1480: cat stderr +./calc.at:1451: $PREPARSER ./calc input +stderr: +input: +input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +input: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 + | (#) + (#) = 2222 +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) + | (* *) + (*) + (*) +./calc.at:1477: $PREPARSER ./calc input ./calc.at:1485: $PREPARSER ./calc input - | 1 = 2 = 3 +./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' ./calc.at:1478: cat stderr -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; + | 1 = 2 = 3 +stderr: + | (1 + # + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input +stderr: +./calc.at:1443: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +./calc.at:1446: cat stderr +./calc.at:1469: cat stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -211009,12 +204488,137 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stdout: +input: +./calc.at:1482: cat stderr +input: +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () input: -./calc.at:1494: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - | error stderr: -./calc.at:1487: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +626. regression.at:55: ok +stderr: +./calc.at:1486: cat stderr input: stderr: Starting parse @@ -211028,16 +204632,28 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -input: +Cleanup: discarding lookahead token '=' () +1.6: syntax error: invalid character: '#' +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +stderr: Starting parse Entering state 0 Reading a token @@ -211045,10 +204661,79 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 @@ -211057,6 +204742,214 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stdout: +stderr: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc /dev/null + | (1 + # + 1) = 1111 +stderr: +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1479: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line 
(1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +625. regression.at:25: ok +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () @@ -211067,11 +204960,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -211079,16 +204972,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -211098,16 +204991,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) $2 = token '+' () $3 = nterm exp (1) @@ -211126,7 +205019,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -211134,7 +205027,7 @@ 
Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -211155,12 +205048,443 @@ Next token is token '*' () Error: discarding token '*' () Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1494: cat stderr + + +input: +input: +input: +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + | (1 + #) = 1111 + | (#) + (#) = 2222 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () + | + | +1 +./calc.at:1491: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 + | (* *) + (*) + (*) +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1476: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp 
(1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] 
+input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -211175,10 +205499,10 @@ Reading a token Next token is token '+' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '+' () Shifting token '+' () @@ -211188,41 +205512,8 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) Shifting token error () Entering state 11 Next token is token '*' () @@ -211240,38 +205531,18 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '\n' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) + $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -211285,171 +205556,268 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: stderr: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none - | error -./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token +Next token 
is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '=' (1.13: ) +Shifting token '=' 
(1.13: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1478: $PREPARSER ./calc input -stdout: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+error: 4444 != 1 Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -Starting parse -Entering state 0 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token invalid token () -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token invalid token () -./calc.at:1477: cat stderr -stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> 
$$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +628. regression.at:127: testing Early token definitions without --yacc ... +./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y Starting parse Entering state 0 Reading a token @@ -211464,7 +205832,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -211479,11 +205847,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -211491,16 +205859,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -211510,16 +205878,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) $2 = token '+' () $3 = nterm exp (1) @@ -211538,7 +205906,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -211546,7 +205914,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -211578,7 +205946,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -211586,7 +205954,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -211600,11 +205968,11 @@ Shifting token '(' () Entering state 4 
Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -211612,16 +205980,16 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 30 Reading a token Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1) $2 = token '*' () $3 = nterm exp (2) @@ -211645,7 +206013,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -211653,7 +206021,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) @@ -211663,16 +206031,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) @@ -211682,36 +206050,50 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 stderr: +stdout: +./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () - -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +input: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: cat stderr +629. regression.at:173: testing Braces parsing ... 
+./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1480: cat stderr + | (1 + # + 1) = 1111 +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 +./torture.at:141: $PREPARSER ./input + | error +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -211721,195 +206103,171 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +input: +' expout || exit 77 stderr: -./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Reading a token -Next token is token invalid token () -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token invalid token () -Starting parse -Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping 
token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1486: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1477: $PREPARSER ./calc input -stderr: + | (1 + #) = 1111 Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -input: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1478: $PREPARSER ./calc input -stderr: +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () stderr: -stdout: -stdout: -./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -./types.at:139: $PREPARSER ./test -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +./calc.at:1476: $PREPARSER ./calc input +syntax error: invalid character: '#' +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -211919,17 +206277,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $PREPARSER ./test -input: -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:162: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -stdout: - | error -./calc.at:1486: $PREPARSER ./calc input -./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -211939,144 +206292,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: cat stderr -./calc.at:1492: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: cat stderr -======== Testing with C++ standard flags: '' -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1485: cat stderr -input: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror stderr: -./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 - | 1 + 2 * 3 + !- ++ ./torture.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: $PREPARSER ./calc input -567. 
calc.at:1492: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -input: -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -stderr: - | 1 = 2 = 3 -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: - | - | +1 -./calc.at:1494: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1485: $PREPARSER ./calc input -memory exhausted -memory exhausted -stderr: -./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1482: cat stderr stderr: stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +stdout: +./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 stderr: +syntax error: invalid character: '#' stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -memory exhausted -memory exhausted + { tests = {{{{{{{{{{}}}}}}}}}}; } +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: -input: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -212084,23 +206316,22 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () +Next token is token '*' () +syntax error Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -212115,102 +206346,86 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '*' () +syntax error Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1 = 2 = 3 -stderr: - -./calc.at:1482: $PREPARSER ./calc input -614. torture.at:485: ok -./calc.at:1478: cat stderr -./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ... -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -stderr: stderr: +syntax error: invalid character: '#' stderr: Starting parse Entering state 0 @@ -212218,7 +206433,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -212230,48 +206445,24 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '=' () -syntax error +syntax error, unexpected '=' Error: popping nterm exp (2) Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +input: +stderr: +./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +./calc.at:1482: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Reading a token @@ -212291,9 +206482,7 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -memory exhausted 
-memory exhausted -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -212303,19 +206492,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -./calc.at:1486: cat stderr +629. regression.at:173: ok + | (!!) + (1 2) = 1 +./calc.at:1480: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -212323,25 +206516,204 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '=' () -syntax error +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) Error: popping nterm exp (2) Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () -memory exhausted -memory exhausted +stdout: +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +stderr: +./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +stderr: +627. regression.at:85: ok +./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + +stderr: +stdout: +./calc.at:1491: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +630. regression.at:196: testing Rule Line Numbers ... +./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -v input.y +631. regression.at:345: testing Mixing %token styles ... +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -Wall -o input.c input.y +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +' expout || exit 77 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +stderr: +./regression.at:235: cat input.output +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +stderr: +./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +604. torture.at:132: ok +syntax error: invalid character: '#' +syntax error, unexpected number +error: 2222 != 1 +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +./calc.at:1477: cat stderr +stderr: +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) input: -./calc.at:1494: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1492: $PREPARSER ./calc /dev/null +./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +630. 
regression.at:196: Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () + ok +./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1491: $PREPARSER ./calc input +632. regression.at:437: testing Token definitions: parse.error=detailed ... +stderr: +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +stdout: +628. regression.at:127: ok + + +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +633. regression.at:438: testing Token definitions: parse.error=verbose ... +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +634. regression.at:447: testing Characters Escapes ... +./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +input: +input: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -212352,273 +206724,933 @@ }eg ' expout || exit 77 input: -615. torture.at:531: ok - | 1 = 2 = 3 +./calc.at:1487: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: cat stderr +./calc.at:1485: cat stderr +./calc.at:1485: cat stderr +./calc.at:1489: cat stderr +./calc.at:1479: cat stderr + | + | +1 + | (- *) + (1 2) = 1 + | (1 + 1) / (1 - 1) ./calc.at:1486: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1478: $PREPARSER ./calc input stderr: +./calc.at:1489: $PREPARSER ./calc input +stderr: +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 +stderr: +./calc.at:1451: $PREPARSER ./calc input +memory exhausted +memory exhausted Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +syntax error, unexpected number +error: 2222 != 1 +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 
(line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.2: 1) 
+-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token 
number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -Starting parse -Entering state 0 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' 
(9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: cat stderr -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Reading a token +Next token 
is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm 
exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1477: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ... -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ... -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -input: -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none - | (#) + (#) = 2222 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (14.1: ) +Entering state 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1487: cat stderr +input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence] +input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence] +./regression.at:466: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +stderr: +./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 +stderr: +stderr: +stdout: stdout: +./existing.at:74: $PREPARSER ./input +./existing.at:74: $PREPARSER ./input stderr: -./calc.at:1494: $PREPARSER ./calc /dev/null -./types.at:139: ./check -./calc.at:1485: "$PERL" -pi -e 'use strict; +stderr: +input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:25.8-14: note: previous declaration + 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:25.8-14: note: previous declaration + 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +./calc.at:1478: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +input: my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -stdout: -./calc.at:1491: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -stderr: -stdout: -./types.at:139: ./check -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -625. regression.at:25: testing Trivial grammars ... -stderr: -./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | - | +1 -./calc.at:1487: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1485: cat stderr -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; +./regression.at:357: sed 's,.*/$,,' stderr 1>&2 +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -212628,105 +207660,161 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr + | (1 + #) = 1111 +./calc.at:1486: cat stderr +./calc.at:1477: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 +1.11-17: error: null divisor Starting parse Entering state 0 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Next token is token '+' () -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +syntax error, unexpected '+' Error: popping nterm input () Cleanup: discarding lookahead token '+' () -input: -stderr: Starting parse Entering state 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -stderr: -./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1491: $PREPARSER ./calc input -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -./regression.at:44: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c - | (- *) + (1 2) = 1 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y - | - | +1 -./calc.at:1482: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -stderr: -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -Error: popping nterm input () -Cleanup: discarding lookahead 
token '+' () -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -212734,11 +207822,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -212746,11 +207834,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -212772,11 +207860,11 @@ Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) + $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token @@ -212800,11 +207888,11 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) + $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token @@ -212812,11 +207900,11 @@ Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) + $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token @@ -212828,11 +207916,11 @@ Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) + $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token @@ -212864,11 +207952,11 @@ Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 
1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) + $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token @@ -212916,11 +208004,11 @@ Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) + $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token @@ -212928,11 +208016,11 @@ Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) + $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token @@ -212957,11 +208045,11 @@ Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) + $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token @@ -213000,11 +208088,11 @@ Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) + $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token @@ -213028,11 +208116,11 @@ Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) + $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token @@ -213047,11 +208135,11 @@ Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) + $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token @@ -213101,11 +208189,11 @@ Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) + $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token @@ -213135,11 +208223,11 @@ Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) + $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token @@ -213183,11 +208271,11 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token "number" (9.1: 1) 
-Shifting token "number" (9.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) + $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token @@ -213195,11 +208283,11 @@ Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) + $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token @@ -213214,11 +208302,11 @@ Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) + $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token @@ -213237,11 +208325,11 @@ Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) + $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token @@ -213272,11 +208360,11 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) + $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token @@ -213288,11 +208376,11 @@ Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) + $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token @@ -213300,11 +208388,11 @@ Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) + $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token @@ -213336,11 +208424,11 @@ Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) + $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token @@ -213378,11 +208466,11 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) + $1 = token number (12.1: 2) -> $$ = nterm 
exp (12.1: 2) Entering state 8 Reading a token @@ -213390,11 +208478,11 @@ Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) + $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token @@ -213402,11 +208490,11 @@ Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) + $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token @@ -213428,11 +208516,11 @@ Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) + $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token @@ -213461,11 +208549,11 @@ Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) + $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token @@ -213473,11 +208561,11 @@ Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) + $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token @@ -213502,11 +208590,11 @@ Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) + $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token @@ -213521,11 +208609,11 @@ Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) + $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token @@ -213551,12 +208639,41 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 -Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1478: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; +./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +input: +input: +./regression.at:437: sed 's,.*/$,,' stderr 1>&2 +input: +./regression.at:438: sed 's,.*/$,,' stderr 1>&2 +./calc.at:1491: cat stderr + | 1 + 2 * 3 + !+ ++ + | (1 + 1) / (1 - 1) +stderr: +stderr: +./calc.at:1443: $PREPARSER ./calc input +stderr: +./calc.at:1485: $PREPARSER ./calc input + | + | +1 +./calc.at:1487: $PREPARSER ./calc input +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +syntax error, unexpected '*', expecting NEWLINE or '{' or ';' +memory exhausted +memory exhausted +stderr: +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +memory exhausted +615. torture.at:531: ok + +635. regression.at:480: testing Web2c Report ... +./regression.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v input.y +input: +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -213566,8 +208683,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -213577,9 +208698,136 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (!!) + (1 2) = 1 + | (!!) + (1 2) = 1 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1479: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1485: $PREPARSER ./calc input stderr: stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +1.6: syntax error: invalid character: '#' +./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./regression.at:506: cat input.output +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +stderr: +stdout: +./calc.at:1492: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc calc.hh + +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error +input: +./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: cat stderr +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + | (1 + #) = 1111 +./calc.at:1478: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +./calc.at:1491: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) Starting parse Entering state 0 Reading a token @@ -213595,7 +208843,7 @@ syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 9 -Reducing stack 0 by rule 15 (line 119): +Reducing stack 0 by rule 15 (line 106): $1 = token '-' () $2 = token error () Shifting token error () @@ -213608,6 +208856,431 @@ Next token is token ')' () Shifting token ')' () Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' 
(1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +617. existing.at:74: ok +./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y + +636. regression.at:661: testing Web2c Actions ... +./regression.at:674: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +input: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +./calc.at:1480: cat stderr +input: + | + | +1 + | 1 2 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +./calc.at:1491: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1446: $PREPARSER ./calc input +stderr: +stderr: +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +stderr: +stderr: +./regression.at:679: cat tables.c +1.11-17: error: null divisor +stderr: +memory exhausted +memory exhausted +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +syntax error, unexpected number +error: 2222 != 1 +635. regression.at:480: ok +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () @@ -213631,13 +209304,158 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token Next token is token number (2) -Error: discarding token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -213653,10 +209471,10 @@ Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) + $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () @@ -213672,58 +209490,635 @@ Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +614. torture.at:485: 616. existing.at:74: ok + ok +input: +stderr: +stdout: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1492: $PREPARSER ./calc input +634. regression.at:447: ok + + + + +637. regression.at:812: testing Useless Tokens ... +639. regression.at:1144: testing Dancer %glr-parser ... 
+./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -o input.c input.y +./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y +638. regression.at:1143: testing Dancer ... +./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y +640. regression.at:1145: testing Dancer lalr1.cc ... +./regression.at:1145: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.cc dancer.y +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1469: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1479: cat stderr +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./regression.at:917: cat tables.c +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' 
(1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +636. 
regression.at:661: stderr: + ok +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp 
(1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -213731,11 +210126,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -213743,11 +210138,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -213769,11 +210164,11 @@ Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) + $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token @@ -213797,11 +210192,11 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) + $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token @@ -213809,11 +210204,11 @@ Shifting token '+' (2.3: ) Entering state 20 Reading a 
token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) + $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token @@ -213825,11 +210220,11 @@ Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) + $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token @@ -213861,11 +210256,11 @@ Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) + $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token @@ -213913,11 +210308,11 @@ Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) + $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token @@ -213925,11 +210320,11 @@ Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) + $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token @@ -213954,11 +210349,11 @@ Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) + $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token @@ -213997,11 +210392,11 @@ Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) + $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token @@ -214025,11 +210420,11 @@ Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) + $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token @@ -214044,11 +210439,11 @@ Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) + $1 = token number (5.10: 1) -> $$ = nterm exp 
(5.10: 1) Entering state 27 Reading a token @@ -214098,11 +210493,11 @@ Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) + $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token @@ -214132,11 +210527,11 @@ Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) + $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token @@ -214180,11 +210575,11 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) + $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token @@ -214192,11 +210587,11 @@ Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) + $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token @@ -214211,11 +210606,11 @@ Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) + $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token @@ -214234,11 +210629,11 @@ Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) + $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token @@ -214269,11 +210664,11 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) + $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token @@ -214285,11 +210680,11 @@ Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) + $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token @@ -214297,11 +210692,11 @@ Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (10.10: 3) +Shifting token number 
(10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) + $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token @@ -214333,11 +210728,11 @@ Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) + $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token @@ -214375,11 +210770,11 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) + $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token @@ -214387,11 +210782,11 @@ Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) + $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token @@ -214399,11 +210794,11 @@ Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) + $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token @@ -214425,11 +210820,11 @@ Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) + $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token @@ -214458,11 +210853,11 @@ Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) + $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token @@ -214470,11 +210865,11 @@ Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) + $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token @@ -214499,11 +210894,11 @@ Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) + $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Reading a 
token @@ -214518,11 +210913,11 @@ Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) + $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token @@ -214548,27 +210943,49 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 -Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +641. regression.at:1220: testing Expecting two tokens ... 
+./regression.at:1220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - | (1 + #) = 1111 -./calc.at:1486: cat stderr -./calc.at:1478: $PREPARSER ./calc input -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./calc.at:1494: cat stderr input: -./calc.at:1477: cat stderr +input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +input: +./regression.at:1145: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS +./regression.at:1143: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS +./regression.at:1144: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS +./calc.at:1480: cat stderr + | 1 = 2 = 3 + | 1 + 2 * 3 + !+ ++ + | (# + 1) = 1111 + | (# + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1 2 +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: $PREPARSER ./calc input stderr: -./calc.at:1491: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' +stderr: +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +stderr: +syntax error, unexpected number +error: 2222 != 1 Starting parse Entering state 0 Reading a token @@ -214576,23 +210993,9 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -214620,36 +211023,9 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (1) Shifting token number (1) @@ -214657,815 +211033,43 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1487: cat stderr - | - | +1 -./calc.at:1486: $PREPARSER ./calc input -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -input: - | (1 + #) = 1111 -./calc.at:1477: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -stderr: -stderr: -stdout: -./calc.at:1487: $PREPARSER ./calc /dev/null -1.6: syntax error: invalid character: '#' -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token end of file () -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token end of file () -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1482: cat stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: - | (* *) + (*) + (*) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1482: $PREPARSER ./calc /dev/null -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1491: cat stderr -stdout: -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1489: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./calc.at:1478: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -215499,6 +211103,12 @@ Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -215511,387 +211121,79 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '\n' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: -./calc.at:1486: cat stderr - | (# + 1) = 1111 -stderr: -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: cat stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1486: $PREPARSER ./calc /dev/null - | 1//2 -./calc.at:1491: $PREPARSER ./calc input -stderr: -stderr: -stderr: -======== Testing with C++ standard flags: '' -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 +Entering state 12 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is 
token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (1) - $2 = token '^' () + $2 = token '*' () $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering 
state 2 +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () @@ -215900,320 +211202,220 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by 
rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +637. regression.at:812: ok + +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:1220: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS +stderr: +stderr: +stderr: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1485: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is 
token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 22 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input 
(1.1-2.0: ) Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +syntax error: invalid character: '#' +Starting parse +Entering state 0 Reading a token Next token is token '\n' () Shifting token '\n' () @@ -216221,1515 +211423,933 @@ Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +Starting parse +Entering state 0 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) 
Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) + $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 +Entering state 29 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) + $1 = token "number" (3) -> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Entering state 30 Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1487: $PREPARSER ./calc input -stdout: -stderr: -stderr: -stderr: -stderr: -./calc.at:1494: cat stderr -./calc.at:1477: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token 
'\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack 0 by rule 11 (line 102): - $1 
= token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) 
Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token -Next token is token ')' () +Next token is token ')' (5.4: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (5.8: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) 
Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (5.11-6.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () --> $$ = nterm line () + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) Entering state 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '=' () 
-Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token -Next token is token '-' () +Next token is token '-' (9.7: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) Entering state 19 Reading a token 
-Next token is token number (3) -Shifting token number (3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token -Next token is token '=' () +Next token is token '=' (9.11: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) Entering state 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Reading a token 
-Next token is token number (3) -Shifting token number (3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token -Next token is token ')' () +Next token is token ')' (10.11: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) Entering state 28 Reading a token -Next token is token '=' () +Next token is token '=' (10.13: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (10.16-11.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = 
nterm exp (2) + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) Entering state 32 -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (12.12-13.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (13.2: 2) +-> $$ = 
nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token -Next token is token ')' () +Next token is token ')' (13.5: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (13.9: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token end of input (14.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -1.2: syntax error: invalid character: '#' -input: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +syntax error: invalid character: '#' +./existing.at:808: grep '^State.*conflicts:' input.output +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +642. regression.at:1221: testing Expecting two tokens %glr-parser ... +./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stderr: +input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' input: - | (# + 1) = 1111 - | 1 2 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1489: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' 
() -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax 
error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./torture.at:237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: input: -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - | (!!) + (1 2) = 1 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1494: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -1.2: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: cat stderr ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -217740,9 +212360,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -217752,439 +212373,61 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token 
is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next 
token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr + | (# + 1) = 1111 + | (- *) + (1 2) = 1 + | (1 + 1) / (1 - 1) +./calc.at:1476: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input 
(1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input stderr: -./calc.at:1485: cat stderr -stdout: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -stderr: -./calc.at:1478: cat stderr -./calc.at:1486: cat stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -stdout: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: cat stderr -./calc.at:1477: cat stderr -stderr: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1485: $PREPARSER ./calc input -stdout: - | (1 + # + 1) = 1111 -./types.at:139: $PREPARSER ./test -input: -stderr: -input: -stdout: -input: -./calc.at:1478: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -stderr: - | (!!) 
+ (1 2) = 1 -stderr: -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1487: $PREPARSER ./calc input +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1486: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) Starting parse Entering state 0 Reading a token @@ -218242,369 +212485,218 @@ Shifting token '!' () Entering state 5 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): $1 = token '!' () - $2 = token '+' () + $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:45: $CC $CFLAGS $CPPFLAGS -c -o input.o -DYYDEBUG -c input.c +./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 2 +./regression.at:1221: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS +./calc.at:1492: $PREPARSER ./calc input +stderr: stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -1.6: syntax error: invalid character: '#' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stderr: -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1491: cat stderr -input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] -input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] -input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] -input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] -input.y:153.1-5: error: useless 
associativity for DOTTED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] -input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] -input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] -input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence 
[-Werror=precedence] -input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] -input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] -input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] -input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] -input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] -input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +syntax error: invalid character: '#' +./existing.at:808: grep '^State.*conflicts:' input.output +error: null divisor Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +stdout: +./calc.at:1494: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1451: cat stderr +./calc.at:1477: cat stderr +./calc.at:1486: cat stderr + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1492: $PREPARSER ./calc input +stderr: +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +input: +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1482: $PREPARSER ./calc input stderr: stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +stderr: +./calc.at:1489: $PREPARSER ./calc /dev/null +./calc.at:1487: $PREPARSER ./calc /dev/null +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 Starting parse Entering state 0 Reading a token @@ -218662,538 +212754,186 @@ Shifting token '!' () Entering state 5 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): $1 = token '!' () - $2 = token '+' () + $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 Reading a token -Next token is token 
'\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stdout: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1479: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +syntax error: invalid character: '#' +error: null divisor +537. calc.at:1451: ok +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1494: $PREPARSER ./calc input -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 stderr: +input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] +stdout: +./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +643. regression.at:1222: testing Expecting two tokens lalr1.cc ... +./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.cc expect2.y +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:438: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS input: -stderr: -stderr: -1.6: syntax error: invalid character: '#' +./regression.at:437: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | error +./calc.at:1446: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: cat stderr +./calc.at:1489: cat stderr + | (* *) + (*) + (*) +./calc.at:1486: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +stderr: +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) + $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '*' () +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) + $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '!' () Shifting token '!' () Entering state 5 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1491: $PREPARSER ./calc input -stderr: -./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -stderr: -stdout: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: $PREPARSER ./calc input -./types.at:139: ./check -stderr: + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) Starting parse Entering state 0 Reading a token @@ -219441,456 +213181,241 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -./calc.at:1479: $PREPARSER ./calc input -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1489: cat stderr -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' () -syntax error -Shifting token error () +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.17: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) Entering state 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) Entering state 20 Reading a token -Next token is token ')' () -syntax error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () +Next token is token 
')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.20: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) Reading a token -Next token is token ')' () +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.30: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 21 Reading a token -Next token is token 
"number" (2) -Shifting token "number" (2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 30 Reading a token -Next token is token '*' () +Next token is token '*' (1.39: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) Entering state 21 Reading a token -Next token is token '*' () -syntax error -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing 
stack 0 by rule 1 (line 69): @@ -219903,134 +213428,7 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr -stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1487: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stdout: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -input: -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $PREPARSER ./test -input: -input: - | 1 2 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1486: cat stderr - | 1//2 -input: -./calc.at:1479: $PREPARSER ./calc input -stderr: - | (- *) + (1 2) = 1 -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -220040,93 +213438,56 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1494: cat stderr -./calc.at:1489: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1491: cat stderr ./calc.at:1491: cat stderr stderr: stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -syntax error, unexpected number -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -======== Testing with C++ standard flags: '' -1.11-17: error: null divisor -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - | (!!) + (1 2) = 1 -input: -./calc.at:1486: $PREPARSER ./calc input -input: -stderr: -stderr: -./calc.at:1482: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1494: $PREPARSER ./calc input +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) stderr: Starting parse Entering state 0 Reading a token +Now at end of input. 
+syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token end of file () +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -220141,136 +213502,32 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -syntax error, unexpected number - | 1 = 2 = 3 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 @@ -220279,21 +213536,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -220307,38 +213555,18 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '\n' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -220348,128 +213576,89 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +534. calc.at:1446: ok +1.2: syntax error: invalid character: '#' +532. calc.at:1443: ok stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp 
(1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -220477,792 +213666,787 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -input: -Starting parse -Entering state 0 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token 
'-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp 
(1) -Entering state 27 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: - | (!!) + (1 2) = 1 -./calc.at:1482: $PREPARSER ./calc input -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (5.11-6.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp 
(5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: -stdout: - | (#) + (#) = 2222 -./calc.at:1485: $PREPARSER ./calc input -stderr: -./calc.at:1477: cat stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: ./check -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering 
state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -Starting parse -Entering state 0 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -stderr: -Starting parse -Entering state 0 +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = 
nterm exp (1.1-13: 2222) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (10.16-11.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1478: cat stderr -input: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1477: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number 
(12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -551. calc.at:1478: ok -./calc.at:1487: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering 
state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input (14.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) + +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +644. regression.at:1230: testing Braced code in declaration in rules section ... +./regression.at:1261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +645. regression.at:1291: testing String alias declared after use ... +./regression.at:1304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; +input: +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -221272,997 +214456,885 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1479: cat stderr +./regression.at:1222: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS +./calc.at:1485: cat stderr +./calc.at:1486: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1485: cat stderr +631. regression.at:345: ok +645. regression.at:1291: ok + stderr: +input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] +stdout: +./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +646. regression.at:1314: testing Extra lookahead sets in report ... 
+./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +stderr: +stderr: +./calc.at:1486: $PREPARSER ./calc /dev/null +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) 
--> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -1.11-17: error: null divisor -./calc.at:1491: cat stderr -input: -input: -./calc.at:1486: cat stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1494: cat stderr - | 1//2 -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: $PREPARSER ./calc input -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -1.11-17: error: null divisor -input: -stderr: -input: - | - | +1 -./calc.at:1491: $PREPARSER ./calc input -input: -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) 
+Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input -stderr: -./calc.at:1489: $PREPARSER ./calc input -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
- | (* *) + (*) + (*) -./calc.at:1494: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 
102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) 
--> $$ = nterm exp (3333) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -Starting parse -Entering state 0 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' 
(9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1482: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1485: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 
90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (10.16-11.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: cat stderr -stderr: -stderr: -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 Reading a 
token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (12.12-13.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input (14.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +stderr: Starting parse Entering state 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1477: cat stderr - | (- *) + (1 2) = 1 -input: -./calc.at:1482: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1485: $PREPARSER ./calc input -stdout: -626. regression.at:55: testing YYSTYPE typedef ... -./types.at:139: $PREPARSER ./test -./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./existing.at:74: $PREPARSER ./input -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -======== Testing with C++ standard flags: '' -./calc.at:1487: $PREPARSER ./calc input -stderr: -./calc.at:1491: cat stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token end of file () Starting parse Entering state 0 Reading a token @@ -222270,23 +215342,9 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () +Next token is token ')' () syntax error Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -222314,36 +215372,9 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -222351,235 +215382,82 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -549. calc.at:1477: ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1479: cat stderr -stdout: -./calc.at:1491: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: ./check -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' 
() +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -./calc.at:1489: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 -Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token ')' () +syntax error Error: popping token '+' () -Error: popping nterm exp (1) +Error: popping nterm exp (3) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) - $2 = token '=' () + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token '*' () syntax error Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -222590,9 +215468,16 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token @@ -222608,13 +215493,37 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () syntax error -Error: popping nterm exp (1) +Error: popping token '*' () +Error: popping nterm exp (2) Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -222630,10 +215539,10 @@ Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) + $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () @@ -222649,17 +215558,17 @@ Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -222673,355 +215582,14 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () 
-./calc.at:1494: cat stderr -stderr: -stderr: -617. existing.at:74: ok -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1 + 2 * 3 + !- ++ -input: -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: cat stderr - | error -input: -input: - | 1 + 2 * 3 + !+ ++ - | 1 = 2 = 3 -stderr: -./calc.at:1479: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' 
() -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1494: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - - | (* *) + (*) + (*) -syntax error, unexpected invalid token -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: $PREPARSER ./calc input - -stderr: -syntax error, unexpected invalid token Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -223029,11 +215597,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -223041,23 +215609,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -223074,52 +215642,12 @@ Next token is token '-' () Shifting token '-' () Entering state 13 -Reducing stack 0 by rule 18 (line 122): +Reducing stack 0 by rule 18 (line 109): $1 = token '!' () $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -223231,48 +215759,39 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1482: cat stderr -./regression.at:74: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./calc.at:1491: cat stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output +647. regression.at:1355: testing Token number in precedence declaration ... +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y stderr: +stdout: +./regression.at:1143: $PREPARSER ./dancer +stderr: +stdout: +./regression.at:1220: $PREPARSER ./expect2 +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror +input: +input: +./regression.at:1262: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -223283,288 +215802,270 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | 1//2 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1480: cat stderr + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.20: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token ')' () +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Reading a token -Next token is token '\n' () +Next token is token '+' (1.30: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: -input: -input: -input: - | 1 + 2 * 3 + !- ++ - | (* *) + (*) + (*) - | (# + 1) = 1111 -stderr: -./calc.at:1482: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1487: cat stderr -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1485: $PREPARSER ./calc input -stdout: -./calc.at:1479: cat stderr -./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -stderr: -./torture.at:238: $PREPARSER ./input -Starting parse -Entering state 0 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token -Next token is token ')' () +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm 
exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -223572,16 +216073,22 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -223602,12 +216109,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 @@ -223621,52 +216137,38 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -223680,11 +216182,79 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping 
nterm input () -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: + | (- *) + (1 2) = 1 +./calc.at:1494: cat stderr + | 1 2 +./calc.at:1479: $PREPARSER ./calc input +stderr: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1494: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc input + | (# + 1) = 1111 +syntax error, unexpected ':' +./calc.at:1478: $PREPARSER ./calc input +stderr: +syntax error, unexpected '+', expecting A or B +stderr: +./regression.at:1143: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1220: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +stderr: +646. regression.at:1314: ok Starting parse Entering state 0 Reading a token @@ -223932,12 +216502,35 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (#) + (#) = 2222 -./calc.at:1494: $PREPARSER ./calc input + +stderr: +stdout: +./existing.at:808: $PREPARSER ./input +stderr: +input.y:24.5-19: error: rule useless in parser due to conflicts [-Werror=other] +input.y:28.5-19: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.1-5: error: useless precedence and associativity for TK1 [-Werror=precedence] +648. regression.at:1408: testing parse-gram.y: LALR = IELR ... 
+./calc.at:1469: cat stderr +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS input: -./calc.at:1487: $PREPARSER ./calc input - | 1 = 2 = 3 +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +./calc.at:1492: cat stderr +./calc.at:1479: cat stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + # + 1) = 1111 +stderr: +./regression.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=lalr input.y +stderr: +./calc.at:1480: $PREPARSER ./calc input stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +stderr: +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -223945,16 +216538,22 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -223975,12 +216574,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 @@ -223994,52 +216602,38 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp 
(1111) --> $$ = nterm exp (3333) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -224054,86 +216648,118 @@ Cleanup: popping token "end of input" () Cleanup: popping nterm input () stderr: -./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 
= nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1489: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -224141,18 +216767,52 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token Next token is token '+' () -Error: discarding token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) -Error: discarding token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -224164,32 +216824,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -224203,13 +216871,127 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () +641. regression.at:1220: ok +638. regression.at:1143: ok +./calc.at:1482: cat stderr + + +./regression.at:1388: sed 's,.*/$,,' stderr 1>&2 +stderr: +649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ... +650. regression.at:1504: testing parse.error=verbose overflow ... +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./regression.at:437: $PREPARSER ./input +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +' expout || exit 77 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | + | +1 +./calc.at:1494: $PREPARSER ./calc input stderr: -605. torture.at:216: ok ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +552. calc.at:1479: ok + +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:1482: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:1611: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + # + 1) = 1111 +stderr: +stderr: +stderr: +./calc.at:1469: $PREPARSER ./calc input +syntax error, unexpected a, expecting ∃¬∩∪∀ +1.2: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -224454,6 +217236,192 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +620. existing.at:808: ok + +stderr: +stdout: +stderr: +./existing.at:808: $PREPARSER ./input +stdout: +./regression.at:438: $PREPARSER ./input +651. regression.at:1628: testing LAC: Exploratory stack ... +./regression.at:1713: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +stderr: +input: +./calc.at:1476: cat stderr +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./regression.at:437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr +input: + | 1//2 +./calc.at:1492: $PREPARSER ./calc input +./regression.at:1144: $PREPARSER ./dancer +stderr: + | (* *) + (*) + (*) +stderr: +./calc.at:1480: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 +./calc.at:1482: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +syntax error: invalid character: '#' +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +652. regression.at:1739: testing LAC: Memory exhaustion ... Starting parse Entering state 0 Reading a token @@ -224551,85 +217519,77 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -627. regression.at:85: testing Early token definitions with --yacc ... 
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '=' -./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc -o input.c input.y -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1771: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y +./regression.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=ielr input.y input: - | - | +1 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr stderr: -./calc.at:1489: $PREPARSER ./calc input -syntax error, unexpected '=' +./calc.at:1477: cat stderr +stderr: +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1486: cat stderr stderr: +./calc.at:1487: $PREPARSER ./calc input +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error, unexpected ':' +stderr: +stderr: +./regression.at:1144: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected a, expecting "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" +syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +Starting parse +Entering state 0 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) Starting parse Entering state 0 Reading a token @@ -224649,7 +217609,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -224676,7 +217636,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -224684,7 +217644,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -224694,16 +217654,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) -> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (2222) @@ -224712,22 +217672,33 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +632. 
regression.at:437: ok + +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +stdout: +./regression.at:1221: $PREPARSER ./expect2 +653. regression.at:1874: testing Lex and parse params: yacc.c ... +./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -224737,7 +217708,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -224747,45 +217722,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -stderr: -./calc.at:1491: cat stderr -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1482: cat stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -224795,7 +217740,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; +./regression.at:1713: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -224805,242 +217753,428 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1485: cat stderr -stdout: -./calc.at:1494: cat stderr -625. regression.at:25: stderr: -input: - ok -./calc.at:1479: cat stderr -628. regression.at:127: testing Early token definitions without --yacc ... -./calc.at:1487: cat stderr -input: -stderr: - | (!!) 
+ (1 2) = 1 -./calc.at:1482: cat stderr -./calc.at:1491: $PREPARSER ./calc input -./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -Starting parse -Entering state 0 +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 +Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 5 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stdout: - | (1 + # + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 
82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () stderr: +stdout: +./existing.at:1460: $PREPARSER ./input input: -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -input: - | - | +1 -./calc.at:1479: $PREPARSER ./calc input my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: input: - | (#) + (#) = 2222 -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1494: cat stderr +input: +input: + | (1 + # + 1) = 1111 + | (1 + # + 1) = 1111 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1476: $PREPARSER ./calc input +./regression.at:1874: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = 
nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none +stderr: +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +./calc.at:1486: cat stderr +./calc.at:1489: cat stderr +./calc.at:1491: cat stderr + | (!!) 
+ (1 2) = 1 +./calc.at:1492: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1486: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -225048,6 +218182,28 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 @@ -225060,22 +218216,49 @@ Shifting token '+' () Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token '+' () -Error: discarding token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (1) -Error: discarding token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -225085,66 +218268,58 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token 
is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () - | (1 + #) = 1111 - -stderr: -input: -./calc.at:1487: $PREPARSER ./calc input -stderr: -stdout: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -syntax error, unexpected '+' - | 1 + 2 * 3 + !+ ++ -./calc.at:1482: $PREPARSER ./calc input -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -225158,18 +218333,37 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '*' () +Error: popping nterm exp (2) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -225181,32 +218375,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token 
Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -225220,180 +218422,159 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1489: cat stderr +syntax error, unexpected LEFT +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '+', expecting A or B Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 +Reading a token Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -629. regression.at:173: testing Braces parsing ... -./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -stderr: -stderr: -stderr: -stderr: -Starting parse -Entering state 0 + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -225402,80 +218583,165 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () -======== Testing with C++ standard flags: '' +619. existing.at:808: ./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + ok + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +633. regression.at:438: 639. regression.at:1144: ok + ok +stderr: +syntax error: invalid character: '#' + + + +stderr: +input: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1263: $PREPARSER ./input --debug +input: +./calc.at:1479: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1482: $PREPARSER ./calc input + | 1//2 +./calc.at:1494: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -225541,88 +218807,11 @@ $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -syntax error, unexpected '+' input: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: $PREPARSER ./calc /dev/null +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -225734,91 +218923,85 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !- ++ -stderr: -./calc.at:1486: $PREPARSER ./calc input -stdout: -./regression.at:116: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stderr: -stderr: -./types.at:139: ./check +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Next token is token '+' () +Reading a token +Next token is token '=' () Reducing stack 0 by rule 7 
(line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -225826,271 +219009,384 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1494: cat stderr +./calc.at:1485: cat stderr +622. existing.at:1460: ok +syntax error: invalid character: '#' +642. regression.at:1221: ok +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 +Reading a token Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1487: cat stderr -Starting parse -Entering state 0 + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.3: ) +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +stderr: +656. regression.at:1877: testing Lex and parse params: glr.cc ... +654. regression.at:1875: testing Lex and parse params: glr.c ... +655. regression.at:1876: testing Lex and parse params: lalr1.cc ... +./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: Starting parse Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) +Stack now 0 +Reducing stack by rule 1 (line 20): +-> $$ = nterm start () Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 +Stack now 0 1 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token 'a' (PRINTER) +syntax error, unexpected 'a', expecting end of file +Error: popping nterm start () +Stack now 0 +Cleanup: discarding lookahead token 'a' (PRINTER) +DESTRUCTOR +Stack now 0 +1.6: syntax error: invalid character: '#' +./regression.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +Starting parse +Entering state 0 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1491: cat stderr -./calc.at:1482: $PREPARSER ./calc input -input: +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +stdout: +./regression.at:1263: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1145: $PREPARSER ./dancer + stderr: -./calc.at:1479: cat stderr +stderr: +stdout: +stdout: +./regression.at:1483: $PREPARSER ./input +./existing.at:1460: $PREPARSER ./input +stderr: +input.y: In function 'yyparse': +input.y:59:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] + 59 | fprintf (stderr, " yymsg_alloc = %d\n", yymsg_alloc); + | ^~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~ + | | + | long int +input.y:60:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] + 60 | fprintf (stderr, " YYSTACK_ALLOC_MAXIMUM = %d\n", + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:62:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] + 62 | fprintf (stderr, " YYSIZE_MAXIMUM = %d\n", YYSIZE_MAXIMUM); + | ^~~~~~~~~~~~~~~~~~~~~~~~~ +stdout: +./regression.at:1613: $PREPARSER ./input +./regression.at:1420: diff lalr.c ielr.c +657. regression.at:1878: testing Lex and parse params: glr2.cc ... +./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +input: +input: +input: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./regression.at:1877: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./regression.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1492: cat stderr input: +./regression.at:1876: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./calc.at:1480: cat stderr +./regression.at:1875: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: - | (# + 1) = 1111 -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -226100,95 +219396,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (1 + # + 1) = 1111 + | (* *) + (*) + (*) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (* *) + (*) + (*) + | (!!) 
+ (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: ./calc.at:1485: cat stderr -input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] -input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] -input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] -input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] -input.y:176.1-5: error: useless 
associativity for OF, use %precedence [-Werror=precedence] -input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] -input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for END, use 
%precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] -input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] -input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] -input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] -input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] -input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: $PREPARSER ./calc /dev/null +./calc.at:1491: $PREPARSER ./calc input +syntax error, unexpected ':' +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B or 123456789112345678921234567893123456789412345678951234567896123C +syntax error, unexpected 'd' +syntax error +memory exhausted +syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B +syntax error, unexpected end of file, expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B Starting parse Entering state 0 Reading a token @@ -226246,30 +219485,16 @@ Shifting token '!' () Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): $1 = token '!' () - $2 = token '-' () + $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1479: $PREPARSER ./calc /dev/null -./regression.at:162: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -input: -stderr: -./calc.at:1494: cat stderr -syntax error, unexpected end of input -./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -./calc.at:1486: "$PERL" -pi -e 'use strict; +./regression.at:1613: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -226279,14 +219504,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + 1) / (1 - 1) +./calc.at:1487: cat stderr +./regression.at:1483: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1145: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1485: $PREPARSER ./calc input -630. regression.at:196: testing Rule Line Numbers ... -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: + +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) stderr: - | (- *) + (1 2) = 1 Starting parse Entering state 0 Reading a token @@ -226294,145 +219529,24 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - { tests = {{{{{{{{{{}}}}}}}}}}; } -stderr: -./calc.at:1491: $PREPARSER ./calc input -syntax error, unexpected end of input -./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -v input.y -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -629. 
regression.at:173: ok -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -226440,7 +219554,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -226452,188 +219566,107 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 +Entering state 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () $3 = nterm exp (1) --> $$ = nterm exp (0) +-> $$ = nterm exp (3) Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -input: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stdout: -./calc.at:1486: cat stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -226642,112 +219675,101 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) -> $$ = nterm exp (2) Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 
10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1494: $PREPARSER ./calc input +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none Starting parse Entering state 0 Reading a token @@ -226755,103 +219777,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.14: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: 
error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -226864,96 +219881,232 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -626. regression.at:55: ok -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -input: - | (#) + (#) = 2222 -./calc.at:1489: cat stderr - -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1486: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 
= nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1487: cat stderr -stderr: -stderr: -stdout: +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -226961,103 +220114,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.14: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: 
error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -227070,8 +220218,52 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +648. regression.at:1408: ok +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +649. regression.at:1430: ok +644. regression.at:1230: 623. existing.at:1460: ok + ok +stderr: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.6: syntax error: invalid character: '#' + + + + +658. regression.at:1889: testing stdio.h is not needed ... +./regression.at:1906: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +661. push.at:145: testing Unsupported Skeletons ... +./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +660. push.at:84: testing Multiple impure instances ... +659. push.at:25: testing Memory Leak for Early Deletion ... +./push.at:134: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./push.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +662. push.at:167: testing Pstate reuse ... +./push.at:276: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y input: -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -227080,15 +220272,71 @@ ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: ' expout || exit 77 -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +./regression.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +input: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1485: $PREPARSER ./calc input + | (- *) + (1 2) = 1 + | error +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) stderr: - -./existing.at:74: $PREPARSER ./input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1489: $PREPARSER ./calc input +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +./calc.at:1486: $PREPARSER ./calc input +650. regression.at:1504: ok +640. regression.at:1145: ok input: +./regression.at:1878: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./calc.at:1469: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr +stderr: +stderr: stderr: +error: null divisor +stderr: +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./push.at:134: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS Starting parse Entering state 0 Reading a token @@ -227096,12 +220344,15 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -227123,12 +220374,12 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -227142,37 +220393,52 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -227182,104 +220448,76 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (1 + # + 1) = 1111 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error, unexpected '*', expecting NEWLINE or '{' or ';' -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; -stderr: - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: $PREPARSER ./calc input -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -227287,19 +220525,28 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -227314,19 +220561,28 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -227334,7 +220590,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -227344,52 +220600,175 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token 
is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2222) + $3 = nterm exp (1) +error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./regression.at:235: cat input.output +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +stderr: +stderr: +stderr: +stderr: +661. push.at:145: ok +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.6: syntax error: invalid character: '#' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +stderr: stderr: -./calc.at:1479: cat stderr Starting parse Entering state 0 Reading a token @@ -227637,22 +221016,117 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1485: cat stderr -./calc.at:1491: cat stderr -stderr: -616. 
existing.at:74: ok -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -227681,12 +221155,6 @@ Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -227736,11 +221204,26 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + + + stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1482: "$PERL" -pi -e 'use strict; +stderr: +stderr: +stdout: +stdout: +stdout: +./regression.at:1874: $PREPARSER ./input +./regression.at:1713: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -227750,17 +221233,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./push.at:75: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./push.at:276: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + | (1 + #) = 1111 + | 1 + 2 * 3 + !+ ++ + | (1 + 1) / (1 - 1) +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1469: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -227768,141 +221264,34 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 @@ -227914,49 +221303,25 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -227964,11 +221329,11 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (4444) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () @@ -227977,42 +221342,104 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -630. regression.at:196: ok -stdout: -./types.at:139: ./check -./calc.at:1479: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' 
() +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +error: null divisor +./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -228042,12 +221469,6 @@ Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -228097,33 +221518,50 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -input: +./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +664. c++.at:107: testing C++ Variant-based Symbols Unit Tests ... +./c++.at:234: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.yy +665. c++.at:247: testing Multiple occurrences of $n and api.value.automove ... +663. c++.at:26: testing C++ Locations Unit Tests ... +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.yy +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -559. calc.at:1485: ok - | (* *) + (*) + (*) -./calc.at:1491: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./regression.at:1222: $PREPARSER ./expect2 +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Werror +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1477: cat stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./calc.at:1494: cat stderr -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 -./calc.at:1486: cat stderr -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input: - | (# + 1) = 1111 stderr: -stdout: -./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./calc.at:1494: $PREPARSER ./calc input stderr: -./calc.at:1487: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; +./regression.at:1713: grep 'syntax error,' stderr.txt +stderr: +stderr: +653. regression.at:1874: ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -228133,218 +221571,302 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1482: cat stderr -631. regression.at:345: testing Mixing %token styles ... +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected '+', expecting A or B +error: null divisor Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -Wall -o input.c input.y -./calc.at:1489: cat stderr - -input: +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + ok +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1222: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) stderr: -input: +======== Testing with C++ standard flags: '' Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' () +Shifting token error () Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (1.7: ) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next 
token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1489: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1486: $PREPARSER ./calc input -./existing.at:808: grep '^State.*conflicts:' input.output - | (1 + 1) / (1 - 1) -input: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: $PREPARSER ./calc input - | (#) + (#) = 2222 stderr: -./calc.at:1482: $PREPARSER ./calc input +stderr: +stdout: +input.yy:16.33-34: error: multiple occurrences of $2 with api.value.automove [-Werror=other] + 16 | | "twice" exp { $$ = $2 + $2; } + | ^~ +input.yy:17.33-36: error: multiple occurrences of $2 with api.value.automove [-Werror=other] + 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } + | ^~~~ +input.yy:17.40-41: error: multiple occurrences of $2 with api.value.automove [-Werror=other] + 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } + | ^~ +./regression.at:1394: $PREPARSER ./input input: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./calc.at:1491: cat stderr +input: +input: +stderr: +./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt + | (1 + 1) / (1 - 1) + | (1 + 1) / (1 - 1) + | (- *) + (1 2) = 1 + | error +./calc.at:1492: $PREPARSER ./calc input stderr: +./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $PREPARSER ./calc input +stderr: +./calc.at:1494: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -228352,11 +221874,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -228389,11 +221911,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token @@ -228418,10 +221940,49 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () +stderr: +./calc.at:1477: $PREPARSER ./calc input +643. regression.at:1222: ok +./calc.at:1486: cat stderr +error: null divisor +./calc.at:1482: cat stderr +./c++.at:263: sed 's,.*/$,,' stderr 1>&2 +input: +input: +./calc.at:1491: cat stderr +./calc.at:1494: cat stderr +./calc.at:1486: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr + | 1 = 2 = 3 + | 1 + 2 * 3 + !- ++ +./calc.at:1489: cat stderr +./calc.at:1491: $PREPARSER ./calc input +input: +666. c++.at:566: testing Variants lalr1.cc ... 
+./calc.at:1480: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: + +1.11-17: error: null divisor +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +error: null divisor Starting parse Entering state 0 Reading a token @@ -228429,14 +221990,20 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) Reading a token Next token is token ')' (1.5: ) @@ -228459,71 +222026,66 @@ Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' (1.15: ) Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -228538,12 +222100,58 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) | (!!) + (1 2) = 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1489: $PREPARSER ./calc input -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +./calc.at:1478: cat stderr stderr: +input: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +647. 
regression.at:1355: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok + | (#) + (#) = 2222 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: $PREPARSER ./calc input stderr: stderr: stderr: +stdout: Starting parse Entering state 0 Reading a token @@ -228551,16 +222159,20 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -228578,12 +222190,21 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -228608,19 +222229,20 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2222) + $3 = nterm exp (1) +error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -228637,139 +222259,174 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: +658. regression.at:1889: 1.11-17: error: null divisor + ok +stderr: +stdout: +./push.at:134: $PREPARSER ./input + +./regression.at:1713: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt + +input: +input: +./calc.at:1487: cat stderr +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=error +input: +input: +./calc.at:1485: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +input: + | (!!) 
+ (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1480: cat stderr +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (- *) + (1 2) = 1 + | 1 + 2 * 3 + !+ ++ + | (- *) + (1 2) = 1 +./push.at:134: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1494: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp 
(2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS Starting parse Entering state 0 Reading a token @@ -228777,146 +222434,120 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' () +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) 
+Entering state 29 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +error: null divisor +stderr: +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none Starting parse Entering state 0 Reading a token @@ -229028,11 +222659,162 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -stdout: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ... +667. c++.at:567: testing Variants lalr1.cc parse.assert ... +669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ... 
+======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +input: +./regression.at:1714: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./calc.at:1492: cat stderr +./calc.at:1485: cat stderr + | (- *) + (1 2) = 1 + | (# + 1) = 1111 +./calc.at:1487: $PREPARSER ./calc input stderr: -632. regression.at:437: testing Token definitions: parse.error=detailed ... -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +./calc.at:1485: $PREPARSER ./calc input +stderr: + | (1 + 1) / (1 - 1) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: $PREPARSER ./calc input +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) Starting parse Entering state 0 Reading a token @@ -229040,15 +222822,19 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token ')' () +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () Entering state 11 +Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 @@ -229067,12 +222853,21 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -229097,19 +222892,20 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2222) + $3 = nterm exp (1) +error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -229126,137 +222922,19 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () stderr: +554. calc.at:1480: ok +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token 
'-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Starting parse Entering state 0 Reading a token @@ -229264,20 +222942,25 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -229295,21 +222978,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 @@ -229334,11 +223017,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -229364,252 +223047,245 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -627. regression.at:85: ok -./calc.at:1479: cat stderr -./calc.at:1491: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -input: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1479: $PREPARSER ./calc input -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror -input: -./calc.at:1487: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1486: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1491: $PREPARSER ./calc input - - | (1 + # + 1) = 1111 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -633. regression.at:438: testing Token definitions: parse.error=verbose ... -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -634. regression.at:447: testing Characters Escapes ... -./calc.at:1489: cat stderr -./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -562. calc.at:1487: ok Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '+' (1.20: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence 
[-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -stdout: -628. regression.at:127: stderr: - ok -stderr: -Starting parse -Entering state 0 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -229622,174 +223298,250 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -input: -syntax error, unexpected number -error: 2222 != 1 - | (# + 1) = 1111 -635. regression.at:480: testing Web2c Report ... -stderr: -input: -./regression.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v input.y -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (- *) + (1 2) = 1 -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -input: -./calc.at:1489: $PREPARSER ./calc input +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + +./push.at:135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +stderr: +stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' () Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token ')' (1.11: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () 
Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: - | (1 + #) = 1111 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -229815,7 +223567,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -229829,13 +223581,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -229844,27 +223596,273 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1482: $PREPARSER ./calc input - -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +stderr: stderr: stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -229877,10 +223875,10 @@ Entering state 2 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Shifting token error () Entering state 9 -Reducing stack 0 by rule 15 (line 106): +Reducing stack 0 by rule 15 (line 119): $1 = token '-' () $2 = token error () Shifting token error () @@ -229893,7 +223891,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -229911,13 +223909,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token Next token is token number (2) -syntax error, unexpected number +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -229929,7 +223927,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -229937,7 +223935,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -229950,13 +223948,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -229966,34 +223964,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () - -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -230001,26 +223986,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -230034,73 +224014,29 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 @@ -230112,32 +224048,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -230147,95 +224091,130 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -stderr: -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' () +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line 
(1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence] -input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence] +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none +./calc.at:1480: cat stderr +stderr: stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -230243,25 +224222,20 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -230352,150 +224326,84 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -stderr: -./calc.at:1494: cat stderr +670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ... 
Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1479: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:357: sed 's,.*/$,,' stderr 1>&2 -636. regression.at:661: testing Web2c Actions ... -input: -./regression.at:674: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -input: +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (1 + 1) / (1 - 1) - | (- *) + (1 2) = 1 -./calc.at:1482: "$PERL" -pi -e 'use strict; +stdout: +./push.at:76: $PREPARSER ./input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -230505,21 +224413,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:506: cat input.output +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +input: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +input: + | 1 + 2 * 3 + !- ++ +stderr: ./calc.at:1479: $PREPARSER ./calc input -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error -./calc.at:1486: "$PERL" -pi -e 'use strict; + | (* *) + (*) + (*) +./calc.at:1485: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1492: $PREPARSER ./calc input +./push.at:76: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -230529,8 +224446,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -230540,272 +224456,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by 
rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:466: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -635. regression.at:480: ok -./calc.at:1491: cat stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr -./calc.at:1486: cat stderr -stderr: +./push.at:135: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1714: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1469: cat stderr ./calc.at:1482: cat stderr -637. regression.at:812: testing Useless Tokens ... 
-./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -o input.c input.y -input: -stderr: | (#) + (#) = 2222 -input: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1486: $PREPARSER ./calc input -input: - | (* *) + (*) + (*) + | 1 + 2 * 3 + !- ++ ./calc.at:1489: $PREPARSER ./calc input -638. regression.at:1143: testing Dancer ... -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -input: - +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1492: cat stderr stderr: stderr: -./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y stderr: +stderr: +659. push.at:25: 1.11-17: error: null divisor + ok Starting parse Entering state 0 Reading a token @@ -230813,22 +224478,8 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -230845,7 +224496,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -230859,13 +224510,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -230874,118 +224525,167 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.3: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token number (2) +Error: discarding token number (2) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.13-16: 2222) 
-Shifting token "number" (1.13-16: 2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) Starting parse Entering state 0 Reading a token @@ -231008,7 +224708,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -231035,7 +224735,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -231043,7 +224743,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -231069,7 +224769,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -231077,7 +224777,7 @@ Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): 
+Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -231086,23 +224786,131 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () - | (# + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] +input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] +input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] +input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] +input.y:153.1-5: error: useless associativity for 
DOTTED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] +input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] +input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] +input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] 
+input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] +input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] +input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] +input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] +input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] +input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] + +./calc.at:1477: cat stderr +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1491: cat stderr ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -231113,23 +224921,154 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +665. c++.at:247: input: + ok +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:25.8-14: note: previous declaration - 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +545. calc.at:1469: | (# + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1494: cat stderr +./calc.at:1476: cat stderr +input: +stderr: +input: + ok +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: Starting parse Entering state 0 Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) + | (- *) + (1 2) = 1 +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -231149,7 +225088,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -231176,7 +225115,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -231184,7 +225123,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -231210,7 +225149,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -231218,7 +225157,7 @@ Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -231227,132 +225166,98 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: +./calc.at:1489: $PREPARSER ./calc input + | + | +1 +./calc.at:1491: $PREPARSER ./calc input +549. calc.at:1477: ok stderr: -./regression.at:679: cat tables.c +syntax error: invalid character: '#' +syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' 
(1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1494: cat stderr +671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ... +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y Starting parse Entering state 0 Reading a token @@ -231422,7 +225327,38 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +input: +./calc.at:1486: cat stderr + +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +input: + | (* *) + (*) + (*) +./calc.at:1492: $PREPARSER ./calc input ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: + | 1 = 2 = 3 Starting parse Entering state 0 Reading a token @@ -231430,6 +225366,42 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 @@ -231438,24 +225410,13 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +Next token is token number (2) +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -231467,32 +225428,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -231506,10 +225475,7 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1494: cat stderr -./calc.at:1479: cat stderr -stderr: -./regression.at:437: sed 's,.*/$,,' stderr 1>&2 +./calc.at:1494: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -231579,42 +225545,12 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -636. regression.at:661: ok -input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:25.8-14: note: previous declaration - 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" 
- | ^~~~~~~ -input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +./calc.at:1479: cat stderr input: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1479: $PREPARSER ./calc input -569. calc.at:1494: ok -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1487: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -231624,19 +225560,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -./calc.at:1491: cat stderr -./calc.at:1489: cat stderr -./regression.at:438: sed 's,.*/$,,' stderr 1>&2 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none -stderr: ./calc.at:1486: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -231645,88 +225570,18 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg +./calc.at:1482: cat stderr ' expout || exit 77 -input: -input: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./torture.at:140: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - | (1 + #) = 1111 -./calc.at:1491: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ - -./calc.at:1489: $PREPARSER ./calc input - stderr: -input: - | (1 + 1) / (1 - 1) -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +547. 
calc.at:1476: ok Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -231734,332 +225589,150 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1482: cat stderr -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) stderr: -./regression.at:1143: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS + +input: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1494: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1486: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -639. regression.at:1144: testing Dancer %glr-parser ... -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.17: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -232072,25 +225745,6 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y -input: -input: - | (1 + # + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1489: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -232098,32 +225752,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -232137,46 +225780,9 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -232190,54 +225796,36 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token 
is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (1) Shifting token number (1) @@ -232245,38 +225833,21 @@ Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -232290,8 +225861,38 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; +input: +input: +input: +input: +input: + | (* *) + (*) + (*) + | (#) + (#) = 2222 +./calc.at:1478: cat stderr +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | (- *) + (1 2) = 1 +./calc.at:1479: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -232301,73 +225902,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1492: cat stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: cat stderr +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +stderr: + | (* *) + (*) + (*) +./calc.at:1487: $PREPARSER ./calc input + | (* *) + (*) + (*) Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -232396,12 +225965,6 @@ Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -232451,395 +226014,108 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1479: cat stderr -./calc.at:1491: cat stderr -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -input: -./calc.at:1479: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stdout: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: cat stderr -./existing.at:808: $PREPARSER ./input -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '*' () +syntax error +Shifting token error () Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1482: cat stderr -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -641. regression.at:1220: testing Expecting two tokens ... -640. regression.at:1145: testing Dancer lalr1.cc ... 
- | 1 + 2 * 3 + !- ++ -./calc.at:1479: $PREPARSER ./calc input -./regression.at:1145: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.cc dancer.y -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./regression.at:1220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y -input: -561. calc.at:1486: ok -620. existing.at:808: ok -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '*' () +syntax error +Shifting token error () Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (1 + 1) / (1 - 1) -./calc.at:1489: cat stderr -stderr: -./calc.at:1482: $PREPARSER ./calc input -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -631. 
regression.at:345: ok -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./regression.at:1144: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token ')' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (3333) $2 = 
token '\n' () -> $$ = nterm line () Entering state 7 @@ -232853,140 +226129,167 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -input: - | (#) + (#) = 2222 -stderr: -./calc.at:1491: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () 
--> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: $PREPARSER ./calc input - +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ... +stdout: +672. c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ... +input: +stderr: +stderr: +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: + | (1 + # + 1) = 1111 +./push.at:277: ./input +./calc.at:1485: cat stderr +stderr: +stderr: +./calc.at:1485: $PREPARSER ./calc input +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y Starting parse Entering state 0 Reading a token @@ -232994,19 +226297,22 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -233021,94 +226327,85 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '+' () 
+Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: -input: -input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (1 + # + 1) = 1111 -./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./calc.at:1491: $PREPARSER ./calc input - -stdout: -stderr: -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1480: cat stderr Starting parse Entering state 0 Reading a token @@ -233116,32 +226413,42 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token Next token is token ')' (1.11: ) Entering state 11 @@ -233149,37 +226456,59 @@ Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -233192,8 +226521,8 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1479: cat stderr -./existing.at:808: grep '^State.*conflicts:' input.output +stderr: +input: Starting parse Entering state 0 Reading a token @@ -233201,12 +226530,21 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -233228,12 +226566,21 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -233258,19 +226605,20 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2222) + $3 = nterm exp (1) +error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -233291,17 +226639,6 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -634. regression.at:447: ok -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -stderr: -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none - | (#) + (#) = 2222 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1482: cat stderr -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: Starting parse Entering state 0 Reading a token @@ -233309,32 +226646,42 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token Next token is token ')' (1.11: ) Entering state 11 @@ -233342,37 +226689,59 @@ Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -233385,41 +226754,16 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -557. calc.at:1482: ok -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +551. calc.at:1478: | + | +1 + ok +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./regression.at:1220: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS -./regression.at:1145: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -643. regression.at:1222: testing Expecting two tokens lalr1.cc ... -642. regression.at:1221: testing Expecting two tokens %glr-parser ... -./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y -./calc.at:1479: "$PERL" -pi -e 'use strict; +./regression.at:1875: $PREPARSER ./input +662. 
push.at:167: ok +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -233429,17 +226773,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.cc expect2.y - -./calc.at:1491: cat stderr -input: -./regression.at:917: cat tables.c -./calc.at:1479: cat stderr - | (1 + #) = 1111 -./calc.at:1489: $PREPARSER ./calc input -./regression.at:438: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -input: Starting parse Entering state 0 Reading a token @@ -233447,11 +226780,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -233484,11 +226817,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token @@ -233513,18 +226846,53 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | (1 + #) = 1111 -637. regression.at:812: ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -input: - | (1 + 1) / (1 - 1) -./calc.at:1491: $PREPARSER ./calc input +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1479: $PREPARSER ./calc input +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +675. c++.at:584: testing Variants and Typed Midrule Actions ... 
+======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +syntax error: invalid character: '#' +syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -233532,6 +226900,42 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 @@ -233540,18 +226944,13 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +Next token is token number (2) +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -233563,32 +226962,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -233602,282 +227009,121 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./regression.at:437: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -644. regression.at:1230: testing Braced code in declaration in rules section ... 
-stderr: -stderr: -./regression.at:1261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line 
(1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./regression.at:1222: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding 
token '*' () Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -645. regression.at:1291: testing String alias declared after use ... - -stderr: -./regression.at:1304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -syntax error: invalid character: '#' -./calc.at:1489: cat stderr -./calc.at:1491: cat stderr -646. regression.at:1314: testing Extra lookahead sets in report ... -./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y -565. 
calc.at:1491: ok -./regression.at:1221: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS -input: - | (# + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input -stderr: +Cleanup: popping token end of file () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -233885,8 +227131,22 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -233903,7 +227163,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -233917,13 +227177,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -233932,34 +227192,24 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + Starting parse Entering state 0 Reading a token @@ -233967,18 +227217,42 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token Next token is token '+' () -Error: discarding token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -233990,32 +227264,54 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -234025,24 +227321,130 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -645. 
regression.at:1291: ok -./calc.at:1479: cat stderr -./regression.at:1262: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: -647. regression.at:1355: testing Token number in precedence declaration ... -./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output +stderr: +674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ... -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y - | (# + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 
+Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./calc.at:1489: cat stderr stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -234052,10 +227454,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1489: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -234066,103 +227466,274 @@ }eg ' expout || exit 77 input: - | (1 + # + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input -646. regression.at:1314: ok -./calc.at:1479: cat stderr -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./push.at:135: $PREPARSER ./input + | (1 + #) = 1111 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS + | 1 + 2 * 3 + !+ ++ +./calc.at:1485: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () +Next token is token ')' (1.5: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token ')' () +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +stdout: +./push.at:135: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +654. regression.at:1875: ok +./regression.at:1772: $PREPARSER ./input --debug +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 -649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ... -./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +676. c++.at:794: testing Doxygen Public Documentation ... +677. c++.at:795: testing Doxygen Private Documentation ... 
+./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +input: +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + | (#) + (#) = 2222 +./calc.at:1482: cat stderr +./calc.at:1491: cat stderr +./calc.at:1494: cat stderr +./calc.at:1489: cat stderr +./calc.at:1489: $PREPARSER ./calc input +./c++.at:794: doxygen --version || exit 77 +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./calc.at:1492: cat stderr +./c++.at:795: doxygen --version || exit 77 +stderr: +./calc.at:1491: $PREPARSER ./calc /dev/null +stderr: +syntax error: invalid character: '#' +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +LAC: initial context established for "end of file" +LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) +memory exhausted +Cleanup: discarding lookahead token "end of file" () +Stack now 0 +./regression.at:1772: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -234170,7 +227741,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -234199,7 +227770,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -234213,13 +227784,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -234228,30 +227799,34 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -648. regression.at:1408: testing parse-gram.y: LALR = IELR ... -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror -./regression.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=lalr input.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +678. c++.at:848: testing Relative namespace references ... +660. 
push.at:84: ok +./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy input: - | (1 + # + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -pi -e 'use strict; +input: + | (* *) + (*) + (*) +./calc.at:1489: $PREPARSER ./calc input +input: + | + | +1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -234262,117 +227837,219 @@ }eg ' expout || exit 77 stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr -stderr: -syntax error: invalid character: '#' -input: - | (1 + 1) / (1 - 1) -./calc.at:1489: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + | (1 + # + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input +--- /dev/null 2026-10-11 17:46:22.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr 2026-10-12 20:01:57.936870317 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found +stdout: +stderr: +./calc.at:1494: $PREPARSER ./calc input +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 
= token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +--- /dev/null 2026-10-11 17:46:22.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr 2026-10-12 20:01:57.936870317 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found +syntax error: invalid character: '#' +stdout: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -234386,20 +228063,6 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -650. regression.at:1504: testing parse.error=verbose overflow ... -./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -234407,101 +228070,83 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing 
stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -234511,91 +228156,60 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -./regression.at:1482: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input.y:24.5-19: error: rule useless in parser due to conflicts [-Werror=other] -input.y:28.5-19: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.1-5: error: useless precedence and associativity for TK1 [-Werror=precedence] -./calc.at:1479: cat stderr -stderr: -stdout: -./existing.at:808: $PREPARSER ./input -./regression.at:1388: sed 's,.*/$,,' stderr 1>&2 -./calc.at:1489: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +input: +./calc.at:1487: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +677. c++.at:795: ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:1611: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error -input: - | (1 + 1) / (1 - 1) -./calc.at:1489: cat stderr -./calc.at:1479: $PREPARSER ./calc input -619. existing.at:808: ok - -stderr: -error: null divisor -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -564. calc.at:1489: ok -stderr: -stdout: -./calc.at:1492: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - -stderr: - -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none -error: null divisor -stderr: -stdout: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -stdout: -./regression.at:438: $PREPARSER ./input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: cat stderr ./calc.at:1492: $PREPARSER ./calc input -./regression.at:1143: $PREPARSER ./dancer -stderr: -651. regression.at:1628: testing LAC: Exploratory stack ... -syntax error, unexpected a, expecting "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" -stderr: -syntax error, unexpected ':' -./regression.at:1713: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +676. c++.at:794: ./regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y + +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -234605,891 +228219,133 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:1143: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./calc.at:1494: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1479: cat stderr -633. 
regression.at:438: ok +./calc.at:1486: cat stderr +./calc.at:1486: cat stderr +input: +input: +======== Testing with C++ standard flags: '' +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1485: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1487: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1492: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a 
token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 
1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = n638. regression.at:1143: ok -term exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) 
-Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 
3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number 
(10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' 
(12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token 
'\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -input: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -652. regression.at:1739: testing LAC: Memory exhaustion ... -./regression.at:1771: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -./calc.at:1494: $PREPARSER ./calc input -553. calc.at:1479: ok -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none -stderr: +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) + skipped (c++.at:795) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -235497,11 +228353,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -235509,840 +228365,563 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = 
nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: + skipped (c++.at:794) +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./calc.at:1482: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Next token is token ')' () +Entering state 11 +Next token 
is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ 
= nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + 
$1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 
2) --> $$ = nterm exp (12.3: 2) -Entering state 32 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) + + +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +input: +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 + | (* *) + (*) + (*) +Starting parse +Entering state 0 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - 
$2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stdout: -input: - - | 1 2 -./regression.at:1713: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1220: $PREPARSER ./expect2 -./calc.at:1492: $PREPARSER ./calc input +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + | 1 + 2 * 3 + !+ ++ stderr: -input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) stderr: -stdout: +./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1486: $PREPARSER ./calc input + | (- *) + (1 2) = 1 stderr: -syntax error, unexpected '+', expecting A or B +./calc.at:1494: $PREPARSER ./calc input stderr: -./regression.at:1220: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1485: cat stderr Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -236350,11 +228929,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -236362,845 +228941,1227 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1491: cat stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '=' (2.12: ) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) Entering state 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nstdout: -term exp (5.1-4: -1) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token number (5.10: 1) -Shifting token 
number (5.10: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +679. c++.at:854: testing Absolute namespace references ... +./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +680. c++.at:863: testing Syntactically invalid namespace references ... +./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: +stdout: +./torture.at:395: $PREPARSER ./input +======== Testing with C++ standard flags: '' +input: +input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +input: +input: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1480: cat stderr +stderr: +stderr: + | (1 + 1) / (1 - 1) + | (1 + #) = 1111 + | 1 + 2 * 3 + !- ++ +./calc.at:1492: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 + !- ++ +./calc.at:1487: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1479: $PREPARSER ./calc input +./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc input +stderr: +./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: +./calc.at:1494: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 -Next token is token '\n' (7.10-8.0: ) +Reading a token +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) 
-Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +681. c++.at:884: testing Syntax error discarding no lookahead ... +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: +stderr: +stdout: +stderr: +stderr: +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) +./regression.at:1876: $PREPARSER ./input +./calc.at:1489: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1482: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next 
token is token '-' () +Shifting token '-' () Entering state 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 28 Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: 
error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) - -stderr: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + | (# + 1) = 1111 + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: $PREPARSER ./calc /dev/null stderr: -./regression.at:437: $PREPARSER ./input - +input: +./calc.at:1480: $PREPARSER ./calc input +613. torture.at:385: my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ok Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -237208,11 +230169,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -237220,861 +230181,867 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 -Next token is token '=' (2.12: ) +Reading a token +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' (2.12: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () 
+Entering state 20 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1489: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./regression.at:1876: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +./calc.at:1492: cat stderr +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = n641. regression.at:1220: ok -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected a, expecting ∃¬∩∪∀ -term exp (5.1-4: -1) +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 
= token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: cat stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +655. 
regression.at:1876: stderr: +stderr: +syntax error: invalid character: '#' +Starting parse +Entering state 0 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) + ok +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1482: cat stderr +input: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token 
'\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token 
-Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm 
line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./regression.at:437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) - | 1 2 -./calc.at:1494: $PREPARSER ./calc input -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -Starting parse -Entering state 0 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -632. regression.at:437: ./regression.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - ok -./regression.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=ielr input.y -./calc.at:1492: "$PERL" -pi -e 'use strict; +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1494: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -238084,439 +231051,413 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -653. regression.at:1874: testing Lex and parse params: yacc.c ... -./regression.at:1483: $PREPARSER ./input - -input: -./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + | 1 + 2 * 3 + !+ ++ stderr: - | 1//2 Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -stderr: -./calc.at:1492: $PREPARSER ./calc input -syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B -syntax error, unexpected end of file, expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B - -./regression.at:1483: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -655. regression.at:1876: testing Lex and parse params: lalr1.cc ... -./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./calc.at:1494: cat stderr -649. 
regression.at:1430: ok -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1489: $PREPARSER ./calc input +stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1492: $PREPARSER ./calc input stderr: -656. regression.at:1877: testing Lex and parse params: glr.cc ... -./regression.at:1874: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -input.y: In function 'yyparse': -input.y:59:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] - 59 | fprintf (stderr, " yymsg_alloc = %d\n", yymsg_alloc); - | ^~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~ - | | - | long int -input.y:60:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] - 60 | fprintf (stderr, " YYSTACK_ALLOC_MAXIMUM = %d\n", - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:62:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] - 62 | fprintf (stderr, " YYSIZE_MAXIMUM = %d\n", YYSIZE_MAXIMUM); - | ^~~~~~~~~~~~~~~~~~~~~~~~~ -./calc.at:1492: cat stderr -stdout: -./regression.at:1613: $PREPARSER ./input -657. regression.at:1878: testing Lex and parse params: glr2.cc ... 
-./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +input: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +stdout: +input: +./calc.at:1486: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -654. regression.at:1875: testing Lex and parse params: glr.c ... -./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B or 123456789112345678921234567893123456789412345678951234567896123C -syntax error, unexpected 'd' -syntax error -memory exhausted -./regression.at:1613: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1876: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -input: +./calc.at:1491: $PREPARSER ./calc input +syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -650. 
regression.at:1504: ok - | error -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) - -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./regression.at:1878: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./calc.at:1494: cat stderr -./regression.at:1877: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./regression.at:1875: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -658. regression.at:1889: testing stdio.h is not needed ... +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./regression.at:1714: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; input: -./regression.at:1906: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -stdout: - | error -./calc.at:1494: $PREPARSER ./calc input - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./regression.at:1263: $PREPARSER ./input --debug -stderr: stderr: Starting parse Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 20): --> $$ = nterm start () -Entering state 1 -Stack now 0 1 Reading a token -Next token is token 'a' (PRINTER) -syntax error, unexpected 'a', expecting end of file -Error: popping nterm start () -Stack now 0 -Cleanup: discarding lookahead token 'a' (PRINTER) -DESTRUCTOR -Stack now 0 -Starting parse -Entering state 0 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -./calc.at:1492: cat stderr -./regression.at:1263: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./regression.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -644. regression.at:1230: Starting parse -Entering state 0 +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) - ok -659. push.at:25: testing Memory Leak for Early Deletion ... -input: -./push.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - | 1 = 2 = 3 -./calc.at:1492: $PREPARSER ./calc input -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1494: cat stderr - -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1494: $PREPARSER ./calc input -./push.at:75: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1492: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering 
state 27 + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -stderr: -stderr: -stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm 
line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -238524,134 +231465,79 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./torture.at:141: $PREPARSER ./input -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -660. push.at:84: testing Multiple impure instances ... -./push.at:134: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1494: cat stderr -604. torture.at:132: ok Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -input: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -./calc.at:1494: $PREPARSER ./calc input - -stderr: -./calc.at:1492: cat stderr -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: $PREPARSER ./calc /dev/null -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Starting parse -Entering state 0 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./push.at:134: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) + | (#) + (#) = 2222 +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -238661,41 +231547,74 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -661. push.at:145: testing Unsupported Skeletons ... -./calc.at:1494: $PREPARSER ./calc /dev/null -./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./calc.at:1492: cat stderr -stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -input: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1492: $PREPARSER ./calc input -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1486: cat stderr + | (1 + 1) / (1 - 1) Starting parse Entering state 0 Reading a token @@ -238943,14 +231862,229 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stdout: +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc input +input: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1874: $PREPARSER ./input -661. push.at:145: ok +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr + stderr: +input: + | (* *) + (*) + (*) +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: + | 1 + 2 * 3 + !+ ++ +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +stderr: +stderr: +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1494: cat stderr Starting parse Entering state 0 Reading a token @@ -239198,21 +232332,523 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1420: diff lalr.c ielr.c +' expout || exit 77 +input: stderr: -./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -stdout: -./regression.at:1221: $PREPARSER ./expect2 -./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +./regression.at:1714: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -648. regression.at:1408: ok -syntax error, unexpected '+', expecting A or B -./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: $PREPARSER ./calc input - +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: $PREPARSER ./calc input +stderr: +input: +stderr: +./calc.at:1494: cat stderr +./calc.at:1485: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + | 1 + 2 * 3 + !- ++ + | (1 + # + 1) = 1111 +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: $PREPARSER ./calc input +./regression.at:1714: grep 'syntax error,' stderr.txt +./calc.at:1489: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -239223,14 +232859,686 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -653. 
regression.at:1874: ok +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1491: cat stderr + | (#) + (#) = 2222 stderr: -stdout: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 
6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1492: $PREPARSER ./calc input +syntax error: invalid character: '#' +stderr: +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./regression.at:1714: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !- ++ +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +558. calc.at:1485: ./calc.at:1491: cat stderr ./calc.at:1492: cat stderr - -642. 
regression.at:1221: ok -./existing.at:1460: $PREPARSER ./input + ok +input: +stderr: +stderr: +./regression.at:1714: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +stderr: + | (# + 1) = 1111 +input: Starting parse Entering state 0 Reading a token @@ -239478,15 +233786,433 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: stderr: - -syntax error, unexpected LEFT +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + # + 1) = 1111 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) + | (!!) + (1 2) = 1 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = 
nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () input: -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./calc.at:1491: $PREPARSER ./calc input + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1482: $PREPARSER ./calc input +./regression.at:1714: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt | (!!) + (1 2) = 1 ./calc.at:1492: $PREPARSER ./calc input +input: +stderr: +stderr: + | (#) + (#) = 2222 +stderr: +./calc.at:1491: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -239610,130 +234336,2308 @@ Next token is token '*' (1.27: ) Error: discarding token '*' (1.27: ) Reading a token -Next token is token ')' (1.28: ) +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' 
(1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +682. c++.at:1064: testing Syntax error as exception: lalr1.cc ... +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: cat stderr +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: +./calc.at:1485: cat stderr +syntax error: invalid character: '#' +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !+ ++ +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: $PREPARSER ./calc input +input: +./regression.at:1715: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +./calc.at:1487: cat stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc input +683. c++.at:1065: testing Syntax error as exception: glr.cc ... +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1489: cat stderr +./c++.at:1065: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (1 + #) = 1111 +./calc.at:1487: $PREPARSER ./calc input +stderr: +./calc.at:1494: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1479: cat stderr +./calc.at:1486: cat stderr +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1492: cat stderr +input: +./calc.at:1486: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (!!) + (1 2) = 1 +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +input: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + # + 1) = 1111 +./calc.at:1494: $PREPARSER ./calc input +stderr: +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +input: +556. calc.at:1482: | (#) + (#) = 2222 + ok +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | (1 + 1) / (1 - 1) +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + | (1 + #) = 1111 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1492: cat stderr +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr +syntax error: invalid character: '#' +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1491: cat stderr +./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./calc.at:1491: cat stderr + | (#) + (#) = 2222 +./calc.at:1486: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input +stdout: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Reading a token +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.42: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of input () +Cleanup: popping nterm input () stderr: Starting parse Entering state 0 @@ -239846,12 +236750,131 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -622. 
existing.at:1460: ok -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +input: stderr: -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; +error: null divisor +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -239861,7 +236884,115 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: +./regression.at:1715: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1487: cat stderr +./calc.at:1489: $PREPARSER ./calc input +stderr: +input: + | (- *) + (1 2) = 1 +./calc.at:1491: $PREPARSER ./calc input +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./regression.at:1877: $PREPARSER ./input Starting parse Entering state 0 Reading a token @@ -239869,117 +237000,367 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | (1 + #) = 1111 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1485: cat stderr +stderr: +./calc.at:1482: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (- *) + (1 2) = 1 +stderr: +./calc.at:1492: $PREPARSER ./calc input +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +error: null divisor +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -658. regression.at:1889: ok - -stderr: -./calc.at:1494: cat stderr -stdout: -./regression.at:1144: $PREPARSER ./dancer -./calc.at:1492: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -239989,121 +237370,168 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -663. c++.at:26: testing C++ Locations Unit Tests ... -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +684. c++.at:1066: testing Syntax error as exception: glr2.cc ... +input: +./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: input: - | (!!) + (1 2) = 1 -syntax error, unexpected ':' -./calc.at:1494: $PREPARSER ./calc input -664. c++.at:107: testing C++ Variant-based Symbols Unit Tests ... -662. push.at:167: testing Pstate reuse ... -./push.at:276: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:1144: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: cat stderr -./c++.at:234: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.yy + | (1 + #) = 1111 +./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: + | (# + 1) = 1111 +./calc.at:1494: cat stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -240112,38 +237540,20 
@@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -639. regression.at:1144: ok -666. c++.at:566: testing Variants lalr1.cc ... -input: - | (- *) + (1 2) = 1 -stderr: -stderr: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: $PREPARSER ./calc input -stdout: -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: -./push.at:276: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -665. c++.at:247: testing Multiple occurrences of $n and api.value.automove ... -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.yy -./push.at:76: $PREPARSER ./input -./regression.at:1394: $PREPARSER ./input -667. c++.at:567: testing Variants lalr1.cc parse.assert ... -stderr: -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -stderr: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -stderr: -./push.at:76: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -240151,98 +237561,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.15: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + 
$1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -240255,10 +237670,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS - -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: Starting parse Entering state 0 Reading a token @@ -240375,19 +237786,23 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Werror -659. push.at:25: 647. regression.at:1355: ok - ok - - -668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ... -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./regression.at:1877: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +stderr: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ... 
-======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -240398,18 +237813,253 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +======== Testing with C++ standard flags: '' +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: -input.yy:16.33-34: error: multiple occurrences of $2 with api.value.automove [-Werror=other] - 16 | | "twice" exp { $$ = $2 + $2; } - | ^~ -input.yy:17.33-36: error: multiple occurrences of $2 with api.value.automove [-Werror=other] - 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } - | ^~~~ -input.yy:17.40-41: error: multiple occurrences of $2 with api.value.automove [-Werror=other] - 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } - | ^~ -669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ... +input: +stderr: +./calc.at:1479: cat stderr stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +680. c++.at:863: Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = 
nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -240526,21 +238176,10 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:263: sed 's,.*/$,,' stderr 1>&2 -stderr: -input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] -stdout: -./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./calc.at:1494: cat stderr -stderr: -stdout: -./push.at:134: $PREPARSER ./input -input: -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=error + ok | (- *) + (1 2) = 1 -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -240549,30 +238188,33 @@ ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg -' expout || exit 77 -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: ./calc.at:1494: $PREPARSER ./calc input +' expout || exit 77 +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -stdout: -./calc.at:1491: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./push.at:134: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: cat stderr stderr: stderr: -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stdout: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -240689,336 +238331,157 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1713: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1491: $PREPARSER ./calc input -stderr: -stdout: -./push.at:135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none -./calc.at:1492: cat stderr -stderr: -./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 +Next token is token number (1) +Error: discarding token number (1) Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (1111) +Shifting token number 
(1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next 
token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Next token is token '\n' (4.10-5.0: ) +Reading a token +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 
26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = n./regression.at:1713: grep 'syntax error,' stderr.txt +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr +./calc.at:1489: cat stderr +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -241135,626 +238598,586 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -term exp (5.1-4: -1) -Entering state 8 +stderr: +./calc.at:1480: cat stderr +stderr: +656. regression.at:1877: input: +./calc.at:1492: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 +Next token is token number (1) +Error: discarding token number (1) Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' 
(7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + ok +Starting parse +Entering state 0 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 -Next token is token '\n' (9.15-10.0: ) +Reading a token +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp 
(1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 +Next token is token number (1) +Error: discarding token number (1) Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp 
(1111) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +input: +stderr: +input: + + | (#) + (#) = 2222 + | (1 + #) = 1111 +./calc.at:1486: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1494: $PREPARSER ./calc input +error: null divisor +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: cat stderr +./calc.at:1491: cat stderr +stderr: +./calc.at:1489: cat stderr +./calc.at:1492: $PREPARSER ./calc input +555. 
calc.at:1480: | (1 + #) = 1111 + ok +stderr: +./calc.at:1492: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a 
token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1489: $PREPARSER ./calc input +input: +input: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) 
+-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none -stderr: -./push.at:135: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (* *) + (*) + (*) -stdout: +./calc.at:1491: $PREPARSER ./calc input stderr: -./regression.at:1875: $PREPARSER ./input -./calc.at:1492: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg + | (# + 1) = 1111 +' expout || exit 77 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1487: cat stderr +560. 
calc.at:1486: ok Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 +Reading a token Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) 
+Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -241762,765 +239185,844 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token 
'-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 -Next token is token '\n' (2.16-3.0: ) +Reading a token +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token 
error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 -Next token is token '\n' (4.10-5.0: ) +Reading a token +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +input: +685. c++.at:1360: testing Exception safety with error recovery ... +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token ')' (1.7: ) +Entering state 11 +Next 
token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Next token is token '\n' (7.10-8.0: ) +Reading a token +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS + | (* *) + (*) + (*) +Starting parse +Entering state 0 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Next token is token '\n' (9.15-10.0: ) +Reading a token +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) 
--> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: $PREPARSER ./calc input +input: +======== Testing with C++ standard flags: '' +input: +Starting parse +Entering state 0 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Reading a token +Next token is token '=' () +Shifting token '=' 
() Entering state 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +stderr: +stderr: +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1485: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $PREPARSER ./calc input +Starting parse +Entering state 0 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = 
nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' (13.9: ) 
-Shifting token '=' (13.9: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1482: cat stderr + +./calc.at:1494: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -665. 
c++.at:247: ok +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +stderr: +stderr: stderr: -./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -242636,13 +240138,296 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: stderr: -stdout: - -./push.at:277: ./input -./regression.at:1713: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt input: -654. 
regression.at:1875: ok +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -242653,13 +240438,95 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: $PREPARSER ./calc input -662. 
push.at:167: ok - -stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + | (1 + # + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1492: cat stderr + | (# + 1) = 1111 stderr: +./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -242775,34 +240642,145 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -./regression.at:1714: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y - -671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ... -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -672. c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ... 
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: cat stderr -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./calc.at:1492: "$PERL" -pi -e 'use strict; +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1479: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -242812,39 +240790,174 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ... 
-======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1494: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stdout: -./existing.at:1460: $PREPARSER ./input -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -input: -./calc.at:1492: cat stderr +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () | (* *) + (*) + (*) ./calc.at:1494: $PREPARSER ./calc input -./regression.at:1714: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +686. c++.at:1361: testing Exception safety without error recovery ... +553. 
calc.at:1479: ok +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr +./calc.at:1491: cat stderr stderr: -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS +input: +./calc.at:1491: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -242854,10 +240967,262 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () stderr: input: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS - | 1 + 2 * 3 + !+ ++ +./calc.at:1487: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1492: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ... + | (1 + #) = 1111 +./calc.at:1494: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: Starting parse Entering state 0 Reading a token @@ -242973,10 +241338,138 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: $PREPARSER ./calc input -623. existing.at:1460: ok + | 1 + 2 * 3 + !+ ++ +input: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: cat stderr +input: +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS +./calc.at:1491: $PREPARSER ./calc input +stderr: + | (1 + # + 1) = 1111 +input: +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1487: $PREPARSER ./calc input ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +stderr: +./calc.at:1489: cat stderr +688. c++.at:1363: testing Exception safety without error recovery api.value.type=variant ... +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS + +input: +./calc.at:1489: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -243043,7 +241536,518 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: + | (# + 1) = 1111 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1485: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: +stderr: + | 1 + 2 * 3 + !+ ++ +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1492: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -243159,18 +242163,209 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +563. calc.at:1489: stderr: +./calc.at:1482: cat stderr + ok +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -stdout: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 - -./calc.at:1491: $PREPARSER ./calc input -./push.at:135: $PREPARSER ./input stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token Next token is token number (1.1: 1) Shifting token number (1.1: 1) Entering state 1 @@ -243234,6 +242429,93 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) stderr: +stderr: +input: + | (1 + 1) / (1 - 1) +./calc.at:1485: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -243245,38 +242527,267 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -stdout: -./regression.at:1772: $PREPARSER ./input --debug +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () input: +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr stderr: -./push.at:135: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !- ++ stderr: -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1491: $PREPARSER ./calc input +689. c++.at:1371: testing C++ GLR parser identifier shadowing ... Starting parse Entering state 0 -Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Now at end of input. 
-LAC: initial context established for "end of file" -LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) -memory exhausted -Cleanup: discarding lookahead token "end of file" () -Stack now 0 -./regression.at:1772: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -660. push.at:84: ok -stderr: +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy + | (1 + # + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -243334,16 +242845,29 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1494: cat stderr +./calc.at:1487: cat stderr -./regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y +stderr: +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -243354,32 +242878,124 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) +Next token is token '/' () +Shifting token '/' () Entering state 22 Reading a token -Next token is token '/' (1.3: 
) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ... -stderr: -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +input: +input: + | 1 + 2 * 3 + !- ++ Starting parse Entering state 0 Reading a token @@ -243445,15 +243061,12 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1494: cat stderr -675. c++.at:584: testing Variants and Typed Midrule Actions ... -======== Testing with C++ standard flags: '' +./calc.at:1492: $PREPARSER ./calc input stderr: -stdout: -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./regression.at:1876: $PREPARSER ./input -./calc.at:1492: "$PERL" -pi -e 'use strict; + | (1 + 1) / (1 - 1) +./calc.at:1492: $PREPARSER ./calc input +562. calc.at:1487: ok +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -243463,40 +243076,451 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1494: $PREPARSER ./calc input -stderr: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./calc.at:1492: cat stderr -stderr: -stdout: -./regression.at:1222: $PREPARSER ./expect2 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: cat stderr stderr: -stdout: -./regression.at:1145: $PREPARSER ./dancer -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | (# + 1) = 1111 +./calc.at:1486: cat stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1491: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1494: $PREPARSER ./calc input stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + | (1 + # + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input stderr: -stdout: -./regression.at:1714: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt stderr: -stdout: -./types.at:139: $PREPARSER ./test -syntax error, unexpected '+', expecting A or B +690. c++.at:1422: testing Shared locations ... Starting parse Entering state 0 Reading a token @@ -243554,26 +243578,18 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./regression.at:1876: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1222: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected ':' stderr: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1145: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (#) + (#) = 2222 -./calc.at:1492: $PREPARSER ./calc input -stderr: -stderr: +input: + Starting parse Entering state 0 Reading a token @@ -243639,8 +243655,6 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./regression.at:1714: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -243648,84 +243662,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 
79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -243734,29 +243766,110 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -======== Testing with C++ standard flags: '' -655. regression.at:1876: 643. regression.at:1222: ok - ok -640. regression.at:1145: ok -input: - | error + | (1 + 1) / (1 - 1) ./calc.at:1491: $PREPARSER ./calc input -./regression.at:1714: grep 'syntax error,' stderr.txt -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | (1 + # + 1) = 1111 stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: $PREPARSER ./calc input stderr: -./regression.at:1714: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./c++.at:1456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x1.cc x1.yy Starting parse Entering state 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -243771,77 +243884,49 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -243850,39 +243935,248 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - - - -stderr: -stdout: -./types.at:139: $PREPARSER ./test +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./types.at:139: $PREPARSER ./test -input: - | 1 + 2 * 3 + !- ++ -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1494: $PREPARSER ./calc input -676. c++.at:794: testing Doxygen Public Documentation ... -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: ./check stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -======== Testing with C++ standard flags: '' stderr: Starting parse Entering state 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -./regression.at:1714: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -243940,27 +244234,101 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -678. c++.at:848: testing Relative namespace references ... -./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -677. c++.at:795: testing Doxygen Private Documentation ... 
-./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:794: doxygen --version || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -stderr: -./c++.at:795: doxygen --version || exit 77 -stdout: -./types.at:139: ./check -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./calc.at:1485: cat stderr +./calc.at:1491: cat stderr ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -243971,21 +244339,291 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ---- /dev/null 2025-09-09 11:24:00.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr 2025-09-09 13:30:54.103041628 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found +./calc.at:1492: cat stderr stderr: -stdout: -./regression.at:1714: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt ---- /dev/null 2025-09-09 11:24:00.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr 2025-09-09 13:30:54.103041628 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found -stdout: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS + | 1 + 2 * 3 + !- ++ +./calc.at:1494: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is 
token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +input: +691. c++.at:1517: testing Default action ... 
+ | (#) + (#) = 2222 +stderr: +./calc.at:1491: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: cat stderr +======== Testing with C++ standard flags: '' Starting parse Entering state 0 Reading a token @@ -244051,7 +244689,23 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: "$PERL" -pi -e 'use strict; + | (#) + (#) = 2222 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1482: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +559. calc.at:1485: ok +567. calc.at:1492: ok +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -244061,20 +244715,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -677. c++.at:795: skipped (c++.at:795) -676. c++.at:794: skipped (c++.at:794) -./calc.at:1492: cat stderr - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - -./regression.at:1715: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -input: - | (1 + #) = 1111 -./calc.at:1492: $PREPARSER ./calc input -stderr: -stdout: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -244085,10 +244725,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1491: cat stderr -./calc.at:1494: cat stderr +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1489: cat stderr +input: +stderr: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -244097,64 +244740,256 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting 
token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -244167,98 +245002,133 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -input: +./calc.at:1494: cat stderr +692. 
java.at:25: testing Java invalid directives ... ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 - | 1 = 2 = 3 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1491: $PREPARSER ./calc input -679. c++.at:854: testing Absolute namespace references ... -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -680. c++.at:863: testing Syntactically invalid namespace references ... -./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1456: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x1.o x1.cc +./calc.at:1486: cat stderr +./calc.at:1491: cat stderr +./java.at:35: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm 
exp (1.11-14: 1111) -Entering state 27 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Reading a token @@ -244356,38 +245226,21 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1489: $PREPARSER ./calc input stderr: +input: +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: Starting parse Entering state 0 Reading a token @@ -244485,57 +245338,131 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (1 + 1) / (1 - 1) + | (1 + # + 1) = 1111 +./calc.at:1494: $PREPARSER ./calc input stderr: +./calc.at:1486: $PREPARSER ./calc input +565. calc.at:1491: ok Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () + +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -244546,131 +245473,134 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:1715: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: stderr: -stdout: -stdout: -./regression.at:1877: $PREPARSER ./input ./calc.at:1494: cat stderr -./c++.at:92: $PREPARSER ./input -stderr: -./calc.at:1491: cat stderr -stdout: -stderr: -./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -input: -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./types.at:139: ./check -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (# + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -input: -input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | - | +1 -./calc.at:1491: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1494: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token 
is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input (2.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1877: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: -656. regression.at:1877: ok +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -244678,11 +245608,11 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) + $1 = token "number" (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token @@ -244699,43 +245629,49 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next 
token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -244744,231 +245680,369 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) 
+Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: +Cleanup: popping token end of input () +Cleanup: popping nterm input () stderr: -./types.at:139: ./check +./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by 
rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: - +./calc.at:1492: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token 
'=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./calc.at:1492: cat stderr -input: -./calc.at:1494: cat stderr -./calc.at:1491: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +Cleanup: popping token end of input () +Cleanup: popping nterm input () stderr: -681. c++.at:884: testing Syntax error discarding no lookahead ... -./calc.at:1491: $PREPARSER ./calc /dev/null + | (#) + (#) = 2222 + +stdout: +stderr: +./calc.at:1494: $PREPARSER ./calc input +./existing.at:74: $PREPARSER ./input Starting parse Entering state 0 Reading a token @@ -244976,11 +246050,11 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) + $1 = token "number" (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token @@ -245000,8 +246074,8 @@ Next token is token '+' (1.8: ) Error: discarding token '+' (1.8: ) Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token Next token is token ')' (1.11: ) Entering state 11 @@ -245019,11 +246093,11 @@ Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) + $1 = token "number" (1.15-18: 1111) -> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token @@ -245048,13 +246122,33 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -245070,49 +246164,77 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -245125,17 +246247,29 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' 
stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) +input: + | (1 + #) = 1111 +./calc.at:1492: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1491: $PREPARSER ./calc input ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +618. existing.at:74: ok +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +./calc.at:1489: cat stderr +' expout || exit 77 +./calc.at:1482: cat stderr +stderr: stderr: +693. java.at:186: testing Java parser class and package names ... +./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y Starting parse Entering state 0 Reading a token @@ -245164,49 +246298,43 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -245219,8 +246347,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: Starting parse Entering state 0 Reading a token @@ -245228,18 +246354,26 @@ Shifting 
token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -245290,49 +246424,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1492: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1491: cat stderr -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: Starting parse Entering state 0 @@ -245341,70 +246432,84 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 Reading a token -Next token is token ')' (1.11: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm 
exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -245417,13 +246522,25 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + 1) / (1 - 1) -./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +564. calc.at:1489: ok +./calc.at:1486: cat stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +557. calc.at:1482: ok +stderr: +stderr: + +./calc.at:1494: cat stderr stderr: Starting parse Entering state 0 @@ -245453,49 +246570,43 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token 
is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -245508,8 +246619,9 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./java.at:50: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y +561. calc.at:1486: ok stderr: -./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -245529,90 +246641,52 @@ Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 Reading a token -Next token is token '\n' 
(1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -245625,8 +246699,13 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +694. java.at:217: testing Java parser class modifiers ... +./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +stdout: +stdout: +./c++.at:92: $PREPARSER ./input + +input: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -245637,253 +246716,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is 
token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) + | (1 + 1) / (1 - 1) +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: $PREPARSER ./calc input + +./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: cat stderr + +./java.at:188: grep '[mb]4_' YYParser.y +stdout: +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./calc.at:1491: cat stderr +693. 
java.at:186: stderr: + skipped (java.at:188) stderr: Starting parse Entering state 0 @@ -245892,11 +246759,11 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) + $1 = token "number" (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token @@ -245904,11 +246771,11 @@ Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) + $1 = token "number" (1.6: 1) -> $$ = nterm exp (1.6: 1) Entering state 29 Reading a token @@ -245937,11 +246804,11 @@ Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) + $1 = token "number" (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 Reading a token @@ -245949,11 +246816,11 @@ Shifting token '-' (1.14: ) Entering state 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) + $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token @@ -245996,22 +246863,27 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +695. java.at:287: testing Java parser class extends and implements ... +input: +stdout: +./calc.at:1492: cat stderr +./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y + | (1 + #) = 1111 +./calc.at:1494: $PREPARSER ./calc input +./c++.at:1555: ./check +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + +stderr: +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y + | (# + 1) = 1111 + | (# + 1) = 1111 +./calc.at:1491: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -246020,234 +246892,64 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.28: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -246260,24 +246962,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: cat stderr -input: -680. c++.at:863: ok - | (1 + 1) / (1 - 1) -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -568. 
calc.at:1492: stderr: -./calc.at:1491: cat stderr - ok +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -246285,11 +246971,11 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) + $1 = token "number" (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token @@ -246297,11 +246983,11 @@ Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) + $1 = token "number" (1.6: 1) -> $$ = nterm exp (1.6: 1) Entering state 29 Reading a token @@ -246330,11 +247016,11 @@ Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) + $1 = token "number" (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 Reading a token @@ -246342,11 +247028,11 @@ Shifting token '-' (1.14: ) Entering state 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) + $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token @@ -246389,14 +247075,12 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -input: - | (!!) 
+ (1 2) = 1 +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -246417,90 +247101,52 @@ Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 
(line 69): @@ -246513,19 +247159,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: $PREPARSER ./calc input - -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: Starting parse Entering state 0 Reading a token @@ -246533,98 +247166,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next 
token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -246637,8 +247228,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +692. java.at:25: ok Starting parse Entering state 0 Reading a token @@ -246646,98 +247236,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token 
'=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -246750,8 +247298,12 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: cat stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./java.at:219: grep '[mb]4_' YYParser.y +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +696. java.at:307: testing Java %parse-param and %lex-param ... +stdout: +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -246761,256 +247313,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -570. calc.at:1494: ok -./calc.at:1491: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input - -682. c++.at:1064: testing Syntax error as exception: lalr1.cc ... -stderr: -683. c++.at:1065: testing Syntax error as exception: glr.cc ... -./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:1065: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; +./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -247020,17 +247324,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr -======== Testing with C++ standard flags: '' -684. 
c++.at:1066: testing Syntax error as exception: glr2.cc ... -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: -./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy - | (* *) + (*) + (*) -./calc.at:1491: $PREPARSER ./calc input stderr: -======== Testing with C++ standard flags: '' Starting parse Entering state 0 Reading a token @@ -247038,102 +247332,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) Shifting token error (1.2: ) Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -247146,11 +247394,12 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +698. java.at:470: testing Java constructor init and init_throws ... +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y stderr: +./java.at:289: grep '[mb]4_' YYParser.y +699. java.at:497: testing Java value, position, and location types ... Starting parse Entering state 0 Reading a token @@ -247158,102 +247407,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) Shifting token error (1.2: ) Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -247266,6 +247469,11 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: cat stderr +./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./calc.at:1494: cat stderr + +694. 
java.at:217: stdout: ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -247276,290 +247484,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1491: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; +695. java.at:287: ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -247569,15 +247494,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + skipped (java.at:219) input: - | (#) + (#) = 2222 -./calc.at:1491: $PREPARSER ./calc input + skipped (java.at:289) + | (# + 1) = 1111 +569. calc.at:1494: ./calc.at:1494: $PREPARSER ./calc input + ok +./calc.at:1491: cat stderr +./calc.at:1492: cat stderr + +700. java.at:528: testing Java syntax error handling without error token ... stderr: +697. java.at:381: testing Java throws specifications ... 
Starting parse Entering state 0 Reading a token @@ -247592,77 +247520,49 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -247675,8 +247575,26 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret YYParser.y +input: + +./java.at:309: grep '[mb]4_' YYParser.y +./java.at:441: 
COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +input: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: + | (1 + # + 1) = 1111 + | (1 + # + 1) = 1111 +./calc.at:1491: $PREPARSER ./calc input +./java.at:475: grep '[mb]4_' YYParser.y +./java.at:499: grep '[mb]4_' YYParser.y +./calc.at:1492: $PREPARSER ./calc input + +696. java.at:307: stderr: stderr: +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +stdout: Starting parse Entering state 0 Reading a token @@ -247691,77 +247609,49 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' 
(1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -247775,24 +247665,6 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1491: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -247821,43 +247693,49 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -247870,8 +247748,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + skipped (java.at:309) Starting parse Entering state 0 Reading a token @@ -247900,43 +247777,49 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) 
+Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -247949,84 +247832,106 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +702. javapush.at:217: testing Trivial Push Parser with %initial-action ... +./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y +701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ... +./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y + +./java.at:441: grep '[mb]4_' YYParser.y +./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java +./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java +703. d.at:103: testing D parser class extends and implements ... +./d.at:106: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +704. d.at:138: testing D parser class api.token.raw true by default ... +./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +705. 
cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ... +./d.at:141: grep '[mb]4_' YYParser.y +./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +./d.at:106: grep '[mb]4_' YYParser.y stderr: +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:410: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS stdout: -./types.at:139: ./check -./calc.at:1491: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: - | (# + 1) = 1111 -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./calc.at:1491: $PREPARSER ./calc input -stderr: -stderr: -Starting parse +stdout: +700. java.at:528: stdout: + skipped (java.at:580) +704. d.at:138: ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +699. java.at:497: 698. java.at:470: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +697. java.at:381: stderr: + skipped (d.at:141) +703. d.at:103: Starting parse Entering state 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token 
number (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -248039,25 +247944,23 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: -./regression.at:1788: $PREPARSER ./input --debug -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" () -LAC: initial context established for "invalid token" -LAC: checking lookahead "invalid token": Always Err -Constructing syntax error message -LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) -syntax error -memory exhausted -Cleanup: discarding lookahead token "invalid token" () -Stack now 0 -./regression.at:1788: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (java.at:475) +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: + skipped (java.at:499) + +./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java + skipped (java.at:441) + skipped (d.at:106) Starting parse Entering state 0 Reading a token @@ -248065,56 +247968,70 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -248127,118 +248044,102 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -652. 
regression.at:1739: ok -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +702. javapush.at:217: ./calc.at:1494: cat stderr + + skipped (javapush.at:230) + + + + -stderr: -input: stderr: stdout: - | (1 + # + 1) = 1111 -input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] -input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] -input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] -input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] -input.y:147.1-5: error: useless precedence 
and associativity for LJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] -input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] -input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] -input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for SOUTH, use %precedence 
[-Werror=precedence] -input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] -input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] -input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] -input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] -input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] -input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./types.at:139: $PREPARSER ./test +./regression.at:1715: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +706. cxx-type.at:415: testing GLR: Resolve ambiguity, impure, locations ... +709. cxx-type.at:432: testing GLR: Merge conflicting parses, impure, no locations ... +./cxx-type.at:416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +./cxx-type.at:433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +711. cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ... +./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +707. cxx-type.at:420: testing GLR: Resolve ambiguity, pure, no locations ... +708. cxx-type.at:426: testing GLR: Resolve ambiguity, pure, locations ... +./cxx-type.at:421: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +710. cxx-type.at:438: testing GLR: Merge conflicting parses, impure, locations ... +./cxx-type.at:427: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +./cxx-type.at:439: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +712. cxx-type.at:449: testing GLR: Merge conflicting parses, pure, locations ... 
+./cxx-type.at:450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stderr: -./calc.at:1491: $PREPARSER ./calc input +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:433: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: +stdout: +./c++.at:1360: ./exceptions || exit 77 +stderr: +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y +stderr: +stderr: +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stderr: +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stderr: +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stderr: +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +./cxx-type.at:445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +./cxx-type.at:450: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +./cxx-type.at:439: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +./cxx-type.at:416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +./cxx-type.at:421: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +./cxx-type.at:427: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Inner caught +Outer caught + | (1 + # + 1) = 1111 +stderr: +./calc.at:1494: $PREPARSER ./calc input +./c++.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +./regression.at:1715: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: cat stderr +./regression.at:1715: grep 'syntax error,' stderr.txt stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -248322,11 +248223,15 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -======== Testing with C++ standard flags: '' -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1492: cat stderr +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: +input: stderr: +stdout: + | (1 + 1) / (1 - 1) Starting parse Entering state 0 Reading a token @@ -248410,8 +248315,149 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./calc.at:1491: "$PERL" -pi -e 'use strict; +stdout: +./c++.at:1362: ./exceptions || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1492: $PREPARSER ./calc input +./regression.at:1715: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1788: $PREPARSER ./input --debug +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 
+Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Inner caught +Outer caught +stderr: +./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java +./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" () +LAC: initial context established for "invalid token" +LAC: checking lookahead "invalid token": Always Err +Constructing syntax error message +LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) +syntax error +memory exhausted +Cleanup: discarding lookahead token "invalid token" () +Stack now 0 +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -248422,17 +248468,8 @@ }eg ' expout || exit 77 stderr: -stdout: -./calc.at:1491: cat stderr -./existing.at:74: $PREPARSER ./input -685. c++.at:1360: testing Exception safety with error recovery ... -input: -stderr: -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS - | (1 + 1) / (1 - 1) -./calc.at:1491: $PREPARSER ./calc input -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./regression.at:1715: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1788: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -248549,8 +248586,9 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -618. 
existing.at:74: ok +./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -248666,2270 +248704,517 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1491: cat stderr -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -566. calc.at:1491: ok -stderr: -======== Testing with C++ standard flags: '' -stdout: -stderr: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - stderr: stdout: -./regression.at:1715: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -stderr: -./regression.at:1715: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1715: grep 'syntax error,' stderr.txt -686. c++.at:1361: testing Exception safety without error recovery ... -./regression.at:1715: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -./regression.at:1715: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ... 
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -stderr: +./calc.at:1494: cat stderr ./regression.at:1715: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stdout: -stderr: -./c++.at:235: $PREPARSER ./list -./regression.at:1716: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -stderr: -stdout: -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./regression.at:1716: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -stderr: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stdout: -./c++.at:849: $PREPARSER ./input -stderr: -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -======== Testing with C++ standard flags: '' -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./regression.at:1878: $PREPARSER ./input -stderr: -./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -657. regression.at:1878: ok - -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -688. c++.at:1363: testing Exception safety without error recovery api.value.type=variant ... 
-./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./c++.at:1360: ./exceptions || exit 77 -stderr: -Inner caught -Outer caught -./c++.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stderr: -stdout: -stdout: -./c++.at:1361: ./exceptions || exit 77 -./types.at:139: $PREPARSER ./test -stderr: -Inner caught -Outer caught -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1362: ./exceptions || exit 77 -stderr: -Inner caught -Outer caught -./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./c++.at:566: $here/modern -stdout: -Modern C++: 201703 -./c++.at:566: $PREPARSER ./list -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -======== Testing with C++ standard flags: '' -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stderr: -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -stdout: -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:570: $here/modern -stdout: -Modern C++: 201703 -./c++.at:570: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: +652. 
regression.at:1739: ok +./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y ./c++.at:1363: ./exceptions || exit 77 stderr: stderr: -Inner caught -Outer caught -stdout: -./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -./regression.at:1716: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -stderr: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./regression.at:1716: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1716: grep 'syntax error,' stderr.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -stderr: -./regression.at:1716: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -stdout: -./c++.at:568: $here/modern -./regression.at:1716: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -stdout: -Modern C++: 201703 -./c++.at:568: $PREPARSER ./list -stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./regression.at:1719: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.cc input.y -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./regression.at:1719: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:567: $here/modern -stdout: -Modern C++: 201703 -./c++.at:567: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:659: $PREPARSER ./input -stderr: +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering 
state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:573: $here/modern -stdout: -Modern C++: 201703 -./c++.at:573: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stdout: -stderr: -./c++.at:572: $here/modern -stdout: -./c++.at:92: $PREPARSER ./input -stdout: -Modern C++: 201703 -./c++.at:572: $PREPARSER ./list -stderr: -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stdout: -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -./c++.at:855: $PREPARSER ./input -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: 
-./torture.at:395: $PREPARSER ./input -stderr: -======== Testing with C++ standard flags: '' -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -613. torture.at:385: ok -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -689. c++.at:1371: testing C++ GLR parser identifier shadowing ... -./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:569: $here/modern -stdout: -Modern C++: 201703 -./c++.at:569: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:574: $here/modern -stdout: -Modern C++: 201703 -./c++.at:574: $PREPARSER ./list -stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:571: $here/modern -stdout: -Modern C++: 201703 -./c++.at:571: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: 
"6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -stdout: -./c++.at:1361: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token -0x557de4ef0f40->Object::Object { } -Next token is token 'a' (0x557de4ef0f40 'a') -Shifting token 'a' (0x557de4ef0f40 'a') +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557de4ef0f40 'a') --> $$ = nterm item (0x557de4ef0f40 'a') -Entering state 10 -Stack now 0 10 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -0x557de4ef0f90->Object::Object { 0x557de4ef0f40 } -Next token is token 'a' (0x557de4ef0f90 'a') -Shifting token 'a' (0x557de4ef0f90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557de4ef0f90 'a') --> $$ = nterm item (0x557de4ef0f90 'a') -Entering state 10 -Stack now 0 10 10 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -0x557de4ef0fe0->Object::Object { 0x557de4ef0f40, 0x557de4ef0f90 } -Next token is token 'a' (0x557de4ef0fe0 'a') -Shifting token 'a' (0x557de4ef0fe0 'a') +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557de4ef0fe0 'a') --> $$ = nterm item (0x557de4ef0fe0 'a') -Entering state 10 -Stack now 0 10 10 10 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 Reading a token -0x557de4ef1030->Object::Object { 0x557de4ef0f40, 
0x557de4ef0f90, 0x557de4ef0fe0 } -Next token is token 'a' (0x557de4ef1030 'a') -Shifting token 'a' (0x557de4ef1030 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557de4ef1030 'a') --> $$ = nterm item (0x557de4ef1030 'a') -Entering state 10 -Stack now 0 10 10 10 10 +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 Reading a token -0x557de4ef1080->Object::Object { 0x557de4ef0f40, 0x557de4ef0f90, 0x557de4ef0fe0, 0x557de4ef1030 } -Next token is token 'p' (0x557de4ef1080 'p'Exception caught: cleaning lookahead and stack -0x557de4ef1080->Object::~Object { 0x557de4ef0f40, 0x557de4ef0f90, 0x557de4ef0fe0, 0x557de4ef1030, 0x557de4ef1080 } -0x557de4ef1030->Object::~Object { 0x557de4ef0f40, 0x557de4ef0f90, 0x557de4ef0fe0, 0x557de4ef1030 } -0x557de4ef0fe0->Object::~Object { 0x557de4ef0f40, 0x557de4ef0f90, 0x557de4ef0fe0 } -0x557de4ef0f90->Object::~Object { 0x557de4ef0f40, 0x557de4ef0f90 } -0x557de4ef0f40->Object::~Object { 0x557de4ef0f40 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -0x557de4ef0f40->Object::Object { } -Next token is token 'a' (0x557de4ef0f40 'a') -Shifting token 'a' (0x557de4ef0f40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557de4ef0f40 'a') --> $$ = nterm item (0x557de4ef0f40 'a') -Entering state 10 -Stack now 0 10 +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 Reading a token -0x557de4ef0f90->Object::Object { 0x557de4ef0f40 } -Next token is token 'a' (0x557de4ef0f90 'a') -Shifting token 'a' (0x557de4ef0f90 'a') +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557de4ef0f90 'a') --> $$ = nterm item (0x557de4ef0f90 'a') -Entering state 10 -Stack now 0 10 10 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -0x557de4ef0fe0->Object::Object { 0x557de4ef0f40, 0x557de4ef0f90 } -Next token is token 'a' (0x557de4ef0fe0 'a') -Shifting token 'a' (0x557de4ef0fe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557de4ef0fe0 'a') --> $$ = nterm item (0x557de4ef0fe0 'a') -Entering state 10 -Stack now 0 10 10 10 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -0x557de4ef1030->Object::Object { 0x557de4ef0f40, 0x557de4ef0f90, 0x557de4ef0fe0 } -Next token is token 'a' (0x557de4ef1030 'a') -Shifting token 'a' (0x557de4ef1030 'a') +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557de4ef1030 'a') --> $$ = nterm item (0x557de4ef1030 'a') -Entering state 10 -Stack now 0 10 10 10 10 +Reducing stack 0 by rule 5 (line 79): + $1 = token number 
(1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -0x557de4ef1080->Object::Object { 0x557de4ef0f40, 0x557de4ef0f90, 0x557de4ef0fe0, 0x557de4ef1030 } -Next token is token 'p' (0x557de4ef1080 'p'Exception caught: cleaning lookahead and stack -0x557de4ef1080->Object::~Object { 0x557de4ef0f40, 0x557de4ef0f90, 0x557de4ef0fe0, 0x557de4ef1030, 0x557de4ef1080 } -0x557de4ef1030->Object::~Object { 0x557de4ef0f40, 0x557de4ef0f90, 0x557de4ef0fe0, 0x557de4ef1030 } -0x557de4ef0fe0->Object::~Object { 0x557de4ef0f40, 0x557de4ef0f90, 0x557de4ef0fe0 } -0x557de4ef0f90->Object::~Object { 0x557de4ef0f40, 0x557de4ef0f90 } -0x557de4ef0f40->Object::~Object { 0x557de4ef0f40 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE -stderr: -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -stdout: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:235: $PREPARSER ./list -stderr: -stderr: -./c++.at:1361: $PREPARSER ./input aaaaT -stdout: -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -error: invalid expression -caught error -error: invalid character -caught error -stderr: -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR -stdout: -stderr: -stderr: -./c++.at:92: $PREPARSER ./input -stdout: -./c++.at:1064: $PREPARSER ./input < in -./c++.at:566: $here/modern -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -stdout: -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Legac++ -./c++.at:566: $PREPARSER ./list -./c++.at:1064: $PREPARSER ./input < in -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stderr: -error: invalid character -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:92: 
$CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:1360: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -./regression.at:1719: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -./c++.at:1360: $PREPARSER ./input i -stderr: -stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Inner caught +Outer caught +./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -exception caught: initial-action +input: + | (1 + 1) / (1 - 1) +./calc.at:1494: $PREPARSER ./calc input stdout: +./regression.at:1716: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +./c++.at:1361: ./exceptions || exit 77 +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:659: $PREPARSER ./input -./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal -stderr: +./calc.at:1491: cat stderr Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./regression.at:1719: grep 'syntax error,' stderr.txt -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaap -stderr: -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -exception caught: yylex -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input --debug aaaap -./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -0x55f9127fef40->Object::Object { } -Next token is token 'a' (0x55f9127fef40 'a') -Shifting token 'a' (0x55f9127fef40 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f9127fef40 'a') --> $$ = nterm item (0x55f9127fef40 'a') -Entering state 11 -Stack now 0 11 
+Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -0x55f9127fef90->Object::Object { 0x55f9127fef40 } -Next token is token 'a' (0x55f9127fef90 'a') -Shifting token 'a' (0x55f9127fef90 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f9127fef90 'a') --> $$ = nterm item (0x55f9127fef90 'a') -Entering state 11 -Stack now 0 11 11 +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 Reading a token -0x55f9127fefe0->Object::Object { 0x55f9127fef40, 0x55f9127fef90 } -Next token is token 'a' (0x55f9127fefe0 'a') -Shifting token 'a' (0x55f9127fefe0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f9127fefe0 'a') --> $$ = nterm item (0x55f9127fefe0 'a') -Entering state 11 -Stack now 0 11 11 11 +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 Reading a token -0x55f9127ff030->Object::Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0 } -Next token is token 'a' (0x55f9127ff030 'a') -Shifting token 'a' (0x55f9127ff030 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f9127ff030 'a') --> $$ = nterm item (0x55f9127ff030 'a') -Entering state 11 -Stack now 0 11 11 11 11 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -0x55f9127ff080->Object::Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0, 0x55f9127ff030 } -Next token is token 'p' (0x55f9127ff080 'p'Exception caught: cleaning lookahead and stack -0x55f9127ff080->Object::~Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0, 0x55f9127ff030, 0x55f9127ff080 } -0x55f9127ff030->Object::~Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0, 0x55f9127ff030 } -0x55f9127fefe0->Object::~Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0 } -0x55f9127fef90->Object::~Object { 0x55f9127fef40, 0x55f9127fef90 } -0x55f9127fef40->Object::~Object { 0x55f9127fef40 } -exception caught: printer -end { } -./c++.at:1363: $PREPARSER ./input i -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -exception caught: initial-action -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 Reading a token -0x55f9127fef40->Object::Object { } -Next token is token 'a' (0x55f9127fef40 'a') -Shifting token 'a' (0x55f9127fef40 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f9127fef40 'a') --> $$ = nterm item (0x55f9127fef40 'a') -Entering state 11 -Stack now 0 11 +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -0x55f9127fef90->Object::Object { 0x55f9127fef40 } -Next token is token 'a' (0x55f9127fef90 'a') -Shifting token 'a' (0x55f9127fef90 'a') 
-Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f9127fef90 'a') --> $$ = nterm item (0x55f9127fef90 'a') -Entering state 11 -Stack now 0 11 11 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -0x55f9127fefe0->Object::Object { 0x55f9127fef40, 0x55f9127fef90 } -Next token is token 'a' (0x55f9127fefe0 'a') -Shifting token 'a' (0x55f9127fefe0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f9127fefe0 'a') --> $$ = nterm item (0x55f9127fefe0 'a') -Entering state 11 -Stack now 0 11 11 11 +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -0x55f9127ff030->Object::Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0 } -Next token is token 'a' (0x55f9127ff030 'a') -Shifting token 'a' (0x55f9127ff030 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f9127ff030 'a') --> $$ = nterm item (0x55f9127ff030 'a') -Entering state 11 -Stack now 0 11 11 11 11 +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 Reading a token -0x55f9127ff080->Object::Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0, 0x55f9127ff030 } -Next token is token 'p' (0x55f9127ff080 'p'Exception caught: cleaning lookahead and stack -0x55f9127ff080->Object::~Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0, 0x55f9127ff030, 0x55f9127ff080 } -0x55f9127ff030->Object::~Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0, 0x55f9127ff030 } -0x55f9127fefe0->Object::~Object { 0x55f9127fef40, 0x55f9127fef90, 0x55f9127fefe0 } -0x55f9127fef90->Object::~Object { 0x55f9127fef40, 0x55f9127fef90 } -0x55f9127fef40->Object::~Object { 0x55f9127fef40 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -./c++.at:1363: $PREPARSER ./input aaaap -stderr: -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap -./c++.at:1360: $PREPARSER ./input aaaaE +Inner caught +Outer caught +./calc.at:1492: cat stderr +./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java +566. calc.at:1491: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -0x7ffc5bedc91f->Object::Object { } -0x7ffc5bedca00->Object::Object { 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'a' (0x7ffc5bedca00 'a') -0x7ffc5bedc940->Object::Object { 0x7ffc5bedca00 } -0x7ffc5bedca00->Object::~Object { 0x7ffc5bedc940, 0x7ffc5bedca00 } -Shifting token 'a' (0x7ffc5bedc940 'a') -0x55903a2fc2e0->Object::Object { 0x7ffc5bedc940 } -0x7ffc5bedc940->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedc940 } -Entering state 1 -Stack now 0 1 -0x7ffc5bedca20->Object::Object { 0x55903a2fc2e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55903a2fc2e0 'a') --> $$ = nterm item (0x7ffc5bedca20 'a') -0x55903a2fc2e0->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedca20 } -0x55903a2fc2e0->Object::Object { 0x7ffc5bedca20 } -0x7ffc5bedca20->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedca20 } -Entering state 10 -Stack now 0 10 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -0x7ffc5bedc91f->Object::Object { 0x55903a2fc2e0 } -0x7ffc5bedca00->Object::Object { 0x55903a2fc2e0, 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'a' (0x7ffc5bedca00 'a') -0x7ffc5bedc940->Object::Object { 0x55903a2fc2e0, 0x7ffc5bedca00 } -0x7ffc5bedca00->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedc940, 0x7ffc5bedca00 } -Shifting token 'a' (0x7ffc5bedc940 'a') -0x55903a2fc300->Object::Object { 0x55903a2fc2e0, 0x7ffc5bedc940 } -0x7ffc5bedc940->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc940 } +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 10 1 -0x7ffc5bedca20->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55903a2fc300 'a') --> $$ = nterm item (0x7ffc5bedca20 'a') -0x55903a2fc300->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca20 } -0x55903a2fc300->Object::Object { 0x55903a2fc2e0, 0x7ffc5bedca20 } -0x7ffc5bedca20->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca20 } -Entering state 10 -Stack now 0 10 10 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -0x7ffc5bedc91f->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300 } -0x7ffc5bedca00->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'a' (0x7ffc5bedca00 'a') -0x7ffc5bedc940->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca00 } 
-0x7ffc5bedca00->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc940, 0x7ffc5bedca00 } -Shifting token 'a' (0x7ffc5bedc940 'a') -0x55903a2fc320->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc940 } -0x7ffc5bedc940->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc940 } -Entering state 1 -Stack now 0 10 10 1 -0x7ffc5bedca20->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55903a2fc320 'a') --> $$ = nterm item (0x7ffc5bedca20 'a') -0x55903a2fc320->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca20 } -0x55903a2fc320->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca20 } -0x7ffc5bedca20->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca20 } -Entering state 10 -Stack now 0 10 10 10 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -0x7ffc5bedc91f->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320 } -0x7ffc5bedca00->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'a' (0x7ffc5bedca00 'a') -0x7ffc5bedc940->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca00 } -0x7ffc5bedca00->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc940, 0x7ffc5bedca00 } -Shifting token 'a' (0x7ffc5bedc940 'a') -0x55903a2fc340->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc940 } -0x7ffc5bedc940->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedc940 } +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 10 10 10 1 -0x7ffc5bedca20->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55903a2fc340 'a') --> $$ = nterm item (0x7ffc5bedca20 'a') -0x55903a2fc340->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedca20 } -0x55903a2fc340->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca20 } -0x7ffc5bedca20->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedca20 } -Entering state 10 -Stack now 0 10 10 10 10 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 Reading a token -0x7ffc5bedc91f->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340 } -0x7ffc5bedca00->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'p' (0x7ffc5bedca00 'p'Exception caught: cleaning lookahead and stack -0x55903a2fc340->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedca00 } -0x55903a2fc320->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca00 } -0x55903a2fc300->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca00 } -0x55903a2fc2e0->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedca00 } -0x7ffc5bedca00->Object::~Object { 0x7ffc5bedca00 } -exception caught: printer -end { } -stderr: -./c++.at:1363: sed 
>&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: syntax error, unexpected end of file, expecting 'a' -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 Reading a token -0x7ffc5bedc91f->Object::Object { } -0x7ffc5bedca00->Object::Object { 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'a' (0x7ffc5bedca00 'a') -0x7ffc5bedc940->Object::Object { 0x7ffc5bedca00 } -0x7ffc5bedca00->Object::~Object { 0x7ffc5bedc940, 0x7ffc5bedca00 } -Shifting token 'a' (0x7ffc5bedc940 'a') -0x55903a2fc2e0->Object::Object { 0x7ffc5bedc940 } -0x7ffc5bedc940->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedc940 } -Entering state 1 -Stack now 0 1 -0x7ffc5bedca20->Object::Object { 0x55903a2fc2e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55903a2fc2e0 'a') --> $$ = nterm item (0x7ffc5bedca20 'a') -0x55903a2fc2e0->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedca20 } -0x55903a2fc2e0->Object::Object { 0x7ffc5bedca20 } -0x7ffc5bedca20->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedca20 } -Entering state 10 -Stack now 0 10 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -0x7ffc5bedc91f->Object::Object { 0x55903a2fc2e0 } -0x7ffc5bedca00->Object::Object { 0x55903a2fc2e0, 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'a' (0x7ffc5bedca00 'a') -0x7ffc5bedc940->Object::Object { 0x55903a2fc2e0, 0x7ffc5bedca00 } -0x7ffc5bedca00->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedc940, 0x7ffc5bedca00 } -Shifting token 'a' (0x7ffc5bedc940 'a') -0x55903a2fc300->Object::Object { 0x55903a2fc2e0, 0x7ffc5bedc940 } -0x7ffc5bedc940->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc940 } -Entering state 1 -Stack now 0 10 1 -0x7ffc5bedca20->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55903a2fc300 'a') --> $$ = nterm item (0x7ffc5bedca20 'a') -0x55903a2fc300->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca20 } -0x55903a2fc300->Object::Object { 0x55903a2fc2e0, 0x7ffc5bedca20 } -0x7ffc5bedca20->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca20 } -Entering state 10 -Stack now 0 10 10 +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 Reading a token -0x7ffc5bedc91f->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300 } -0x7ffc5bedca00->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'a' (0x7ffc5bedca00 'a') -0x7ffc5bedc940->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca00 } -0x7ffc5bedca00->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc940, 0x7ffc5bedca00 } -Shifting token 'a' (0x7ffc5bedc940 'a') -0x55903a2fc320->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedc940 } -0x7ffc5bedc940->Object::~Object { 
0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc940 } +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 10 10 1 -0x7ffc5bedca20->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55903a2fc320 'a') --> $$ = nterm item (0x7ffc5bedca20 'a') -0x55903a2fc320->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca20 } -0x55903a2fc320->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca20 } -0x7ffc5bedca20->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca20 } -Entering state 10 -Stack now 0 10 10 10 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -0x7ffc5bedc91f->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320 } -0x7ffc5bedca00->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'a' (0x7ffc5bedca00 'a') -0x7ffc5bedc940->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca00 } -0x7ffc5bedca00->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc940, 0x7ffc5bedca00 } -Shifting token 'a' (0x7ffc5bedc940 'a') -0x55903a2fc340->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedc940 } -0x7ffc5bedc940->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedc940 } +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 10 10 10 1 -0x7ffc5bedca20->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55903a2fc340 'a') --> $$ = nterm item (0x7ffc5bedca20 'a') -0x55903a2fc340->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedca20 } -0x55903a2fc340->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca20 } -0x7ffc5bedca20->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedca20 } -Entering state 10 -Stack now 0 10 10 10 10 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -0x7ffc5bedc91f->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340 } -0x7ffc5bedca00->Object::Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedc91f } -0x7ffc5bedc91f->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedc91f, 0x7ffc5bedca00 } -Next token is token 'p' (0x7ffc5bedca00 'p'Exception caught: cleaning lookahead and stack -0x55903a2fc340->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x55903a2fc340, 0x7ffc5bedca00 } -0x55903a2fc320->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x55903a2fc320, 0x7ffc5bedca00 } -0x55903a2fc300->Object::~Object { 0x55903a2fc2e0, 0x55903a2fc300, 0x7ffc5bedca00 } -0x55903a2fc2e0->Object::~Object { 0x55903a2fc2e0, 0x7ffc5bedca00 } -0x7ffc5bedca00->Object::~Object { 0x7ffc5bedca00 } -exception caught: printer -end { } -651. 
regression.at:1628: ./c++.at:1363: grep '^exception caught: printer$' stderr - skipped (regression.at:1727) -./c++.at:1360: $PREPARSER ./input aaaaT -stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -stderr: -stderr: -stdout: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -exception caught: syntax error -./c++.at:1360: $PREPARSER ./input aaaaR - -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./c++.at:1363: $PREPARSER ./input aaaaE -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:941: $PREPARSER ./input -stderr: -======== Testing with C++ standard flags: '' -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +568. calc.at:1492: ok +701. javapush.at:172: skipped (javapush.at:207) stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -690. c++.at:1422: testing Shared locations ... 
-======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x1.cc x1.yy -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1456: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x1.o x1.cc stderr: stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: stdout: + +713. cxx-type.at:455: testing GLR: Verbose messages, resolve ambiguity, impure, no locations ... +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:1716: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./cxx-type.at:456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y + ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:1362: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffefe43798f->Object::Object { } -0x7ffefe437a70->Object::Object { 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'a' (0x7ffefe437a70 'a') -0x7ffefe4379b0->Object::Object { 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 0x7ffefe4379b0, 0x7ffefe437a70 } -Shifting token 'a' (0x7ffefe4379b0 'a') -0x55eb64be42e0->Object::Object { 0x7ffefe4379b0 } -0x7ffefe4379b0->Object::~Object { 0x55eb64be42e0, 0x7ffefe4379b0 } -Entering state 2 -Stack now 0 2 -0x7ffefe437a90->Object::Object { 0x55eb64be42e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55eb64be42e0 'a') --> $$ = nterm item (0x7ffefe437a90 'a') -0x55eb64be42e0->Object::~Object { 0x55eb64be42e0, 0x7ffefe437a90 } -0x55eb64be42e0->Object::Object { 0x7ffefe437a90 } -0x7ffefe437a90->Object::~Object { 0x55eb64be42e0, 0x7ffefe437a90 } -Entering state 11 -Stack now 0 11 -Reading a token -0x7ffefe43798f->Object::Object { 0x55eb64be42e0 } -0x7ffefe437a70->Object::Object { 0x55eb64be42e0, 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x55eb64be42e0, 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'a' (0x7ffefe437a70 'a') 
-0x7ffefe4379b0->Object::Object { 0x55eb64be42e0, 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 0x55eb64be42e0, 0x7ffefe4379b0, 0x7ffefe437a70 } -Shifting token 'a' (0x7ffefe4379b0 'a') -0x55eb64be4300->Object::Object { 0x55eb64be42e0, 0x7ffefe4379b0 } -0x7ffefe4379b0->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe4379b0 } -Entering state 2 -Stack now 0 11 2 -0x7ffefe437a90->Object::Object { 0x55eb64be42e0, 0x55eb64be4300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55eb64be4300 'a') --> $$ = nterm item (0x7ffefe437a90 'a') -0x55eb64be4300->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a90 } -0x55eb64be4300->Object::Object { 0x55eb64be42e0, 0x7ffefe437a90 } -0x7ffefe437a90->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a90 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0x7ffefe43798f->Object::Object { 0x55eb64be42e0, 0x55eb64be4300 } -0x7ffefe437a70->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'a' (0x7ffefe437a70 'a') -0x7ffefe4379b0->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe4379b0, 0x7ffefe437a70 } -Shifting token 'a' (0x7ffefe4379b0 'a') -0x55eb64be4320->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe4379b0 } -0x7ffefe4379b0->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe4379b0 } -Entering state 2 -Stack now 0 11 11 2 -0x7ffefe437a90->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55eb64be4320 'a') --> $$ = nterm item (0x7ffefe437a90 'a') -0x55eb64be4320->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a90 } -0x55eb64be4320->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a90 } -0x7ffefe437a90->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a90 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x7ffefe43798f->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320 } -0x7ffefe437a70->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'a' (0x7ffefe437a70 'a') -0x7ffefe4379b0->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe4379b0, 0x7ffefe437a70 } -Shifting token 'a' (0x7ffefe4379b0 'a') -0x55eb64be4340->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe4379b0 } -0x7ffefe4379b0->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe4379b0 } -Entering state 2 -Stack now 0 11 11 11 2 -0x7ffefe437a90->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55eb64be4340 'a') --> $$ = nterm item (0x7ffefe437a90 'a') -0x55eb64be4340->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe437a90 } -0x55eb64be4340->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a90 } -0x7ffefe437a90->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe437a90 } -Entering 
state 11 -Stack now 0 11 11 11 11 -Reading a token -0x7ffefe43798f->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340 } -0x7ffefe437a70->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'p' (0x7ffefe437a70 'p'Exception caught: cleaning lookahead and stack -0x55eb64be4340->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe437a70 } -0x55eb64be4320->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a70 } -0x55eb64be4300->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a70 } -0x55eb64be42e0->Object::~Object { 0x55eb64be42e0, 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 0x7ffefe437a70 } -exception caught: printer -end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffefe43798f->Object::Object { } -0x7ffefe437a70->Object::Object { 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'a' (0x7ffefe437a70 'a') -0x7ffefe4379b0->Object::Object { 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 0x7ffefe4379b0, 0x7ffefe437a70 } -Shifting token 'a' (0x7ffefe4379b0 'a') -0x55eb64be42e0->Object::Object { 0x7ffefe4379b0 } -0x7ffefe4379b0->Object::~Object { 0x55eb64be42e0, 0x7ffefe4379b0 } -Entering state 2 -Stack now 0 2 -0x7ffefe437a90->Object::Object { 0x55eb64be42e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55eb64be42e0 'a') --> $$ = nterm item (0x7ffefe437a90 'a') -0x55eb64be42e0->Object::~Object { 0x55eb64be42e0, 0x7ffefe437a90 } -0x55eb64be42e0->Object::Object { 0x7ffefe437a90 } -0x7ffefe437a90->Object::~Object { 0x55eb64be42e0, 0x7ffefe437a90 } -Entering state 11 -Stack now 0 11 -Reading a token -0x7ffefe43798f->Object::Object { 0x55eb64be42e0 } -0x7ffefe437a70->Object::Object { 0x55eb64be42e0, 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x55eb64be42e0, 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'a' (0x7ffefe437a70 'a') -0x7ffefe4379b0->Object::Object { 0x55eb64be42e0, 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 0x55eb64be42e0, 0x7ffefe4379b0, 0x7ffefe437a70 } -Shifting token 'a' (0x7ffefe4379b0 'a') -0x55eb64be4300->Object::Object { 0x55eb64be42e0, 0x7ffefe4379b0 } -0x7ffefe4379b0->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe4379b0 } -Entering state 2 -Stack now 0 11 2 -0x7ffefe437a90->Object::Object { 0x55eb64be42e0, 0x55eb64be4300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55eb64be4300 'a') --> $$ = nterm item (0x7ffefe437a90 'a') -0x55eb64be4300->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a90 } -0x55eb64be4300->Object::Object { 0x55eb64be42e0, 0x7ffefe437a90 } -0x7ffefe437a90->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a90 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0x7ffefe43798f->Object::Object { 0x55eb64be42e0, 0x55eb64be4300 } -0x7ffefe437a70->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'a' (0x7ffefe437a70 'a') -0x7ffefe4379b0->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 
0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe4379b0, 0x7ffefe437a70 } -Shifting token 'a' (0x7ffefe4379b0 'a') -0x55eb64be4320->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe4379b0 } -0x7ffefe4379b0->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe4379b0 } -Entering state 2 -Stack now 0 11 11 2 -0x7ffefe437a90->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55eb64be4320 'a') --> $$ = nterm item (0x7ffefe437a90 'a') -0x55eb64be4320->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a90 } -0x55eb64be4320->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a90 } -0x7ffefe437a90->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a90 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x7ffefe43798f->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320 } -0x7ffefe437a70->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'a' (0x7ffefe437a70 'a') -0x7ffefe4379b0->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe4379b0, 0x7ffefe437a70 } -Shifting token 'a' (0x7ffefe4379b0 'a') -0x55eb64be4340->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe4379b0 } -0x7ffefe4379b0->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe4379b0 } -Entering state 2 -Stack now 0 11 11 11 2 -0x7ffefe437a90->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55eb64be4340 'a') --> $$ = nterm item (0x7ffefe437a90 'a') -0x55eb64be4340->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe437a90 } -0x55eb64be4340->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a90 } -0x7ffefe437a90->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe437a90 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x7ffefe43798f->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340 } -0x7ffefe437a70->Object::Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe43798f } -0x7ffefe43798f->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe43798f, 0x7ffefe437a70 } -Next token is token 'p' (0x7ffefe437a70 'p'Exception caught: cleaning lookahead and stack -0x55eb64be4340->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x55eb64be4340, 0x7ffefe437a70 } -0x55eb64be4320->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x55eb64be4320, 0x7ffefe437a70 } -0x55eb64be4300->Object::~Object { 0x55eb64be42e0, 0x55eb64be4300, 0x7ffefe437a70 } -0x55eb64be42e0->Object::~Object { 0x55eb64be42e0, 0x7ffefe437a70 } -0x7ffefe437a70->Object::~Object { 0x7ffefe437a70 } -exception caught: printer -end { } -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: grep '^exception caught: printer$' stderr -444. 
types.at:139: ok -stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae +./calc.at:1494: cat stderr -stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1066: $PREPARSER ./input < in -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -./c++.at:1066: $PREPARSER ./input < in -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -691. c++.at:1517: testing Default action ... -======== Testing with C++ standard flags: '' ./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -error: invalid expression -======== Testing with C++ standard flags: '' -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: -./c++.at:573: $here/modern -stdout: -Legac++ -./c++.at:573: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stderr: -stdout: -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +570. calc.at:1494: ok ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -stderr: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stdout: -./c++.at:1066: ./check --std=c++98 not supported +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + stderr: stdout: ./c++.at:855: $PREPARSER ./input +======== Testing with C++ standard flags: '' +714. glr-regression.at:205: testing Badly Collapsed GLR States: glr.c ... 
+./glr-regression.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.c glr-regr1.y
+./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 ./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ...
+./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y
 ======== Testing with C++ standard flags: ''
 ./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+715. glr-regression.at:206: testing Badly Collapsed GLR States: glr.cc ...
+./glr-regression.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y
 stderr:
+types.y:77.8-37: warning: unset value: $$ [-Wother]
+types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
+types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+./cxx-type.at:456: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS
+717. glr-regression.at:354: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.c ...
+./glr-regression.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.c glr-regr2a.y +./glr-regression.at:205: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr1 glr-regr1.c $LIBS stderr: stdout: -stdout: -./c++.at:1361: $PREPARSER ./input aaaas -693. java.at:186: testing Java parser class and package names ... -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS stderr: -stdout: -./c++.at:1361: $PREPARSER ./input aaaal ./c++.at:1463: sed -ne '/INCLUDED/p;/\\file/{p;n;p;}' include/ast/loc.hh -stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x2.cc x2.yy -./java.at:50: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y -./c++.at:1361: $PREPARSER ./input i -stderr: -./java.at:188: grep '[mb]4_' YYParser.y -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1361: $PREPARSER ./input aaaap -693. java.at:186: stderr: - skipped (java.at:188) -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap - -stderr: stdout: +./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x2.cc x2.yy +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./glr-regression.at:206: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x565431dccf40->Object::Object { } -Next token is token 'a' (0x565431dccf40 'a') -Shifting token 'a' (0x565431dccf40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x565431dccf40 'a') --> $$ = nterm item (0x565431dccf40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x565431dccf90->Object::Object { 0x565431dccf40 } -Next token is token 'a' (0x565431dccf90 'a') -Shifting token 'a' (0x565431dccf90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x565431dccf90 'a') --> $$ = nterm item (0x565431dccf90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x565431dccfe0->Object::Object { 0x565431dccf40, 0x565431dccf90 } -Next token is token 'a' (0x565431dccfe0 'a') -Shifting token 'a' (0x565431dccfe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x565431dccfe0 'a') --> $$ = nterm item (0x565431dccfe0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x565431dcd030->Object::Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0 } -Next token is token 'a' (0x565431dcd030 'a') -Shifting token 'a' (0x565431dcd030 'a') -Entering state 1 -Stack 
now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x565431dcd030 'a') --> $$ = nterm item (0x565431dcd030 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x565431dcd080->Object::Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0, 0x565431dcd030 } -Next token is token 'p' (0x565431dcd080 'p'Exception caught: cleaning lookahead and stack -0x565431dcd080->Object::~Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0, 0x565431dcd030, 0x565431dcd080 } -0x565431dcd030->Object::~Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0, 0x565431dcd030 } -0x565431dccfe0->Object::~Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0 } -0x565431dccf90->Object::~Object { 0x565431dccf40, 0x565431dccf90 } -0x565431dccf40->Object::~Object { 0x565431dccf40 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:92: $PREPARSER ./input ./c++.at:235: $PREPARSER ./list +./glr-regression.at:354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.c $LIBS stderr: -./c++.at:1471: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x2.o x2.cc -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x565431dccf40->Object::Object { } -Next token is token 'a' (0x565431dccf40 'a') -Shifting token 'a' (0x565431dccf40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x565431dccf40 'a') --> $$ = nterm item (0x565431dccf40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x565431dccf90->Object::Object { 0x565431dccf40 } -Next token is token 'a' (0x565431dccf90 'a') -Shifting token 'a' (0x565431dccf90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x565431dccf90 'a') --> $$ = nterm item (0x565431dccf90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x565431dccfe0->Object::Object { 0x565431dccf40, 0x565431dccf90 } -Next token is token 'a' (0x565431dccfe0 'a') -Shifting token 'a' (0x565431dccfe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x565431dccfe0 'a') --> $$ = nterm item (0x565431dccfe0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x565431dcd030->Object::Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0 } -Next token is token 'a' (0x565431dcd030 'a') -Shifting token 'a' (0x565431dcd030 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x565431dcd030 'a') --> $$ = nterm item (0x565431dcd030 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x565431dcd080->Object::Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0, 0x565431dcd030 } -Next token is token 'p' (0x565431dcd080 'p'Exception caught: cleaning lookahead and stack -0x565431dcd080->Object::~Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0, 0x565431dcd030, 0x565431dcd080 } -0x565431dcd030->Object::~Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0, 0x565431dcd030 } -0x565431dccfe0->Object::~Object { 0x565431dccf40, 0x565431dccf90, 0x565431dccfe0 } -0x565431dccf90->Object::~Object { 0x565431dccf40, 0x565431dccf90 } -0x565431dccf40->Object::~Object { 0x565431dccf40 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr -stdout: -stderr: -exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae -692. 
java.at:25: ./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -exception caught: syntax error +./c++.at:1471: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x2.o x2.cc ./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -694. java.at:217: testing Java parser class modifiers ... -./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:1361: $PREPARSER ./input aaaaT -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1361: $PREPARSER ./input aaaaR stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./java.at:219: grep '[mb]4_' YYParser.y stdout: stderr: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stdout: -./c++.at:1064: $PREPARSER ./input < in -695. java.at:287: testing Java parser class extends and implements ... -694. java.at:217: stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (java.at:219) -./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:1064: $PREPARSER ./input < in - -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in stderr: -./java.at:289: grep '[mb]4_' YYParser.y -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +./c++.at:941: $PREPARSER ./input +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -stdout: -695. java.at:287: ./c++.at:941: $PREPARSER ./input -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS - skipped (java.at:289) stderr: syntax error Discarding 'a'. Reducing 'a'. 
- ./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -696. java.at:307: testing Java %parse-param and %lex-param ... -./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -stderr: -stdout: -./c++.at:567: $here/modern -stdout: -Legac++ -./c++.at:567: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./java.at:309: grep '[mb]4_' YYParser.y -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: -696. java.at:307: skipped (java.at:309) - -697. java.at:381: testing Java throws specifications ... -./java.at:441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./java.at:441: grep '[mb]4_' YYParser.y -698. java.at:470: testing Java constructor init and init_throws ... -./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -stdout: -697. java.at:381: ./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS - skipped (java.at:441) - -./java.at:475: grep '[mb]4_' YYParser.y -stdout: -698. java.at:470: stderr: -stdout: -./c++.at:572: $here/modern - skipped (java.at:475) -stdout: -Legac++ -./c++.at:572: $PREPARSER ./list -stderr: - -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: -699. java.at:497: testing Java value, position, and location types ... 
-./c++.at:566: $here/modern -./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -stdout: -Legac++ -./c++.at:566: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./java.at:499: grep '[mb]4_' YYParser.y -stdout: -700. java.at:528: testing Java syntax error handling without error token ... -./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret YYParser.y -699. java.at:497: skipped (java.at:499) -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS - -700. java.at:528: skipped (java.at:580) - -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ... -./c++.at:1065: $PREPARSER ./input < in -./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y -./c++.at:568: $here/modern -stderr: -error: invalid character -702. javapush.at:217: testing Trivial Push Parser with %initial-action ... -./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java -stdout: -Legac++ -./c++.at:568: $PREPARSER ./list -======== Testing with C++ standard flags: '' -./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java -./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stderr: -stdout: -stderr: -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stdout: -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: -702. 
javapush.at:217: ./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y -======== Testing with C++ standard flags: '' - skipped (javapush.at:230) ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y - ./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -703. d.at:103: testing D parser class extends and implements ... -./d.at:106: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./d.at:106: grep '[mb]4_' YYParser.y -./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java -stdout: -703. d.at:103: ./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java - skipped (d.at:106) - -./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -451. types.at:139: ok -./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java - -704. d.at:138: testing D parser class api.token.raw true by default ... -./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -701. javapush.at:172: skipped (javapush.at:207) - -705. cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ... -./d.at:141: grep '[mb]4_' YYParser.y -./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -stdout: -704. d.at:138: stderr: stdout: -./types.at:139: $PREPARSER ./test - skipped (d.at:141) -stderr: - -706. cxx-type.at:415: testing GLR: Resolve ambiguity, impure, locations ... +./regression.at:1878: $PREPARSER ./input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./cxx-type.at:416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./c++.at:855: $PREPARSER ./input +./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +657. regression.at:1878: ok stderr: stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -452. 
types.at:139: ok stdout: -======== Testing with C++ standard flags: '' -./c++.at:849: $PREPARSER ./input -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:410: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: - +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:659: $PREPARSER ./input + stderr: Starting parse Entering state 0 @@ -250976,768 +249261,39 @@ Cleanup: popping nterm expr (40) destroy: 40 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -707. cxx-type.at:420: testing GLR: Resolve ambiguity, pure, no locations ... -stderr: -./cxx-type.at:421: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -stdout: -stderr: -stdout: -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -443. types.at:139: ok - -stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -708. cxx-type.at:426: testing GLR: Resolve ambiguity, pure, locations ... -./cxx-type.at:421: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./cxx-type.at:427: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -stderr: -stdout: -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:571: $here/modern -stdout: -Legac++ -./c++.at:571: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -709. cxx-type.at:432: testing GLR: Merge conflicting parses, impure, no locations ... 
-./cxx-type.at:433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:569: $here/modern -stderr: -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: -./cxx-type.at:427: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -Legac++ -./c++.at:569: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:433: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -stderr: -stdout: -./c++.at:1501: $CXX $CPPFLAGS $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -445. types.at:139: ok -stderr: -stderr: -stdout: -./c++.at:574: $here/modern -stdout: -stdout: -./types.at:139: $PREPARSER ./test - -Legac++ -./c++.at:574: $PREPARSER ./list -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -446. types.at:139: ok -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: - -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -440. types.at:139: ok -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -710. cxx-type.at:438: testing GLR: Merge conflicting parses, impure, locations ... -448. types.at:139: ok -./cxx-type.at:439: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y - -711. 
cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ... -./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y - -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -712. cxx-type.at:449: testing GLR: Merge conflicting parses, pure, locations ... -./c++.at:1360: $PREPARSER ./input aaaas -stderr: -stderr: -./cxx-type.at:450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -stdout: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -713. cxx-type.at:455: testing GLR: Verbose messages, resolve ambiguity, impure, no locations ... -./types.at:139: $PREPARSER ./test -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:439: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./c++.at:1360: $PREPARSER ./input aaaal -./cxx-type.at:456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -stderr: -stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input i -447. types.at:139: ok -stderr: -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaap -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: - -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input --debug aaaap -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -442. 
types.at:139: ok -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x56507dcbaf40->Object::Object { } -Next token is token 'a' (0x56507dcbaf40 'a') -Shifting token 'a' (0x56507dcbaf40 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56507dcbaf40 'a') --> $$ = nterm item (0x56507dcbaf40 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x56507dcbaf90->Object::Object { 0x56507dcbaf40 } -Next token is token 'a' (0x56507dcbaf90 'a') -Shifting token 'a' (0x56507dcbaf90 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56507dcbaf90 'a') --> $$ = nterm item (0x56507dcbaf90 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x56507dcbafe0->Object::Object { 0x56507dcbaf40, 0x56507dcbaf90 } -Next token is token 'a' (0x56507dcbafe0 'a') -Shifting token 'a' (0x56507dcbafe0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56507dcbafe0 'a') --> $$ = nterm item (0x56507dcbafe0 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x56507dcbb030->Object::Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0 } -Next token is token 'a' (0x56507dcbb030 'a') -Shifting token 'a' (0x56507dcbb030 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56507dcbb030 'a') --> $$ = nterm item (0x56507dcbb030 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x56507dcbb080->Object::Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0, 0x56507dcbb030 } -Next token is token 'p' (0x56507dcbb080 'p'Exception caught: cleaning lookahead and stack -0x56507dcbb080->Object::~Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0, 0x56507dcbb030, 0x56507dcbb080 } -0x56507dcbb030->Object::~Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0, 0x56507dcbb030 } -0x56507dcbafe0->Object::~Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0 } -0x56507dcbaf90->Object::~Object { 0x56507dcbaf40, 0x56507dcbaf90 } -0x56507dcbaf40->Object::~Object { 0x56507dcbaf40 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stderr: -stderr: -./cxx-type.at:456: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x56507dcbaf40->Object::Object { } -Next token is token 'a' (0x56507dcbaf40 'a') -Shifting token 'a' (0x56507dcbaf40 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56507dcbaf40 'a') --> $$ = nterm item (0x56507dcbaf40 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x56507dcbaf90->Object::Object { 0x56507dcbaf40 } -Next token is token 'a' (0x56507dcbaf90 'a') -Shifting token 'a' (0x56507dcbaf90 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56507dcbaf90 'a') --> $$ = nterm item (0x56507dcbaf90 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x56507dcbafe0->Object::Object { 0x56507dcbaf40, 0x56507dcbaf90 } -Next token is token 'a' (0x56507dcbafe0 'a') -Shifting token 'a' (0x56507dcbafe0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56507dcbafe0 'a') --> $$ 
= nterm item (0x56507dcbafe0 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x56507dcbb030->Object::Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0 } -Next token is token 'a' (0x56507dcbb030 'a') -Shifting token 'a' (0x56507dcbb030 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56507dcbb030 'a') --> $$ = nterm item (0x56507dcbb030 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x56507dcbb080->Object::Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0, 0x56507dcbb030 } -Next token is token 'p' (0x56507dcbb080 'p'Exception caught: cleaning lookahead and stack -0x56507dcbb080->Object::~Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0, 0x56507dcbb030, 0x56507dcbb080 } -0x56507dcbb030->Object::~Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0, 0x56507dcbb030 } -0x56507dcbafe0->Object::~Object { 0x56507dcbaf40, 0x56507dcbaf90, 0x56507dcbafe0 } -0x56507dcbaf90->Object::~Object { 0x56507dcbaf40, 0x56507dcbaf90 } -0x56507dcbaf40->Object::~Object { 0x56507dcbaf40 } -exception caught: printer -end { } -stdout: -./c++.at:1555: $PREPARSER ./test -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: - -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -stderr: -stderr: -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -exception caught: syntax error -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:450: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./c++.at:1360: $PREPARSER ./input aaaaE -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -714. glr-regression.at:205: testing Badly Collapsed GLR States: glr.c ... 
-exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./glr-regression.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.c glr-regr1.y -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 1876 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 144 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -715. glr-regression.at:206: testing Badly Collapsed GLR States: glr.cc ... 
-./glr-regression.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y -======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:205: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr1 glr-regr1.c $LIBS -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: -stderr: -./glr-regression.at:206: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal -stderr: -stdout: -./c++.at:1555: ./check -449. types.at:139: stderr: - ok -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr --std=c++98 not supported -======== Testing with C++ standard flags: '' -./c++.at:1363: $PREPARSER ./input i -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./c++.at:1363: $PREPARSER ./input aaaap -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffc890eed9f->Object::Object { } -0x7ffc890eee60->Object::Object { 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'a' (0x7ffc890eee60 'a') -0x7ffc890eedd0->Object::Object { 0x7ffc890eee60 } -0x7ffc890eed87->Object::Object { 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eed87->Object::~Object { 0x7ffc890eed87, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x7ffc890eedd0, 0x7ffc890eee60 } -Shifting token 'a' (0x7ffc890eedd0 'a') -0x5654f75f22e0->Object::Object { 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::Object { 0x5654f75f22e0, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::~Object { 0x5654f75f22e0, 0x7ffc890eed5f, 0x7ffc890eedd0 } -0x7ffc890eedd0->Object::~Object { 0x5654f75f22e0, 0x7ffc890eedd0 } -Entering state 1 -Stack now 0 1 -0x7ffc890eee80->Object::Object { 0x5654f75f22e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5654f75f22e0 'a') --> $$ = nterm item (0x7ffc890eee80 'a') -0x5654f75f22e0->Object::~Object { 0x5654f75f22e0, 0x7ffc890eee80 } -0x5654f75f22e0->Object::Object { 0x7ffc890eee80 } -0x7ffc890eee38->Object::Object { 0x5654f75f22e0, 0x7ffc890eee80 } -0x7ffc890eee38->Object::~Object { 0x5654f75f22e0, 0x7ffc890eee38, 0x7ffc890eee80 } -0x7ffc890eee80->Object::~Object { 0x5654f75f22e0, 0x7ffc890eee80 } -Entering state 10 -Stack now 0 10 -Reading a token -0x7ffc890eed9f->Object::Object { 0x5654f75f22e0 } -0x7ffc890eee60->Object::Object { 0x5654f75f22e0, 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x5654f75f22e0, 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'a' (0x7ffc890eee60 'a') -0x7ffc890eedd0->Object::Object { 0x5654f75f22e0, 0x7ffc890eee60 } -0x7ffc890eed87->Object::Object { 0x5654f75f22e0, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eed87->Object::~Object { 0x5654f75f22e0, 0x7ffc890eed87, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x5654f75f22e0, 0x7ffc890eedd0, 0x7ffc890eee60 } -Shifting token 'a' 
(0x7ffc890eedd0 'a') -0x5654f75f2300->Object::Object { 0x5654f75f22e0, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eed5f, 0x7ffc890eedd0 } -0x7ffc890eedd0->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0 } -Entering state 1 -Stack now 0 10 1 -0x7ffc890eee80->Object::Object { 0x5654f75f22e0, 0x5654f75f2300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5654f75f2300 'a') --> $$ = nterm item (0x7ffc890eee80 'a') -0x5654f75f2300->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee80 } -0x5654f75f2300->Object::Object { 0x5654f75f22e0, 0x7ffc890eee80 } -0x7ffc890eee38->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee80 } -0x7ffc890eee38->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee38, 0x7ffc890eee80 } -0x7ffc890eee80->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee80 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0x7ffc890eed9f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300 } -0x7ffc890eee60->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'a' (0x7ffc890eee60 'a') -0x7ffc890eedd0->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee60 } -0x7ffc890eed87->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eed87->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eed87, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0, 0x7ffc890eee60 } -Shifting token 'a' (0x7ffc890eedd0 'a') -0x5654f75f2320->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eed5f, 0x7ffc890eedd0 } -0x7ffc890eedd0->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0 } -Entering state 1 -Stack now 0 10 10 1 -0x7ffc890eee80->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5654f75f2320 'a') --> $$ = nterm item (0x7ffc890eee80 'a') -0x5654f75f2320->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee80 } -0x5654f75f2320->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee80 } -0x7ffc890eee38->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee80 } -0x7ffc890eee38->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee38, 0x7ffc890eee80 } -0x7ffc890eee80->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee80 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x7ffc890eed9f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320 } -0x7ffc890eee60->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'a' (0x7ffc890eee60 'a') -0x7ffc890eedd0->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee60 } -0x7ffc890eed87->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0, 0x7ffc890eee60 } 
-0x7ffc890eed87->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eed87, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0, 0x7ffc890eee60 } -Shifting token 'a' (0x7ffc890eedd0 'a') -0x5654f75f2340->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eed5f, 0x7ffc890eedd0 } -0x7ffc890eedd0->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eedd0 } -Entering state 1 -Stack now 0 10 10 10 1 -0x7ffc890eee80->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5654f75f2340 'a') --> $$ = nterm item (0x7ffc890eee80 'a') -0x5654f75f2340->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee80 } -0x5654f75f2340->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee80 } -0x7ffc890eee38->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee80 } -0x7ffc890eee38->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee38, 0x7ffc890eee80 } -0x7ffc890eee80->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee80 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x7ffc890eed9f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340 } -0x7ffc890eee60->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'p' (0x7ffc890eee60 'p'Exception caught: cleaning lookahead and stack -0x5654f75f2340->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee60 } -0x5654f75f2320->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee60 } -0x5654f75f2300->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee60 } -0x5654f75f22e0->Object::~Object { 0x5654f75f22e0, 0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x7ffc890eee60 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffc890eed9f->Object::Object { } -0x7ffc890eee60->Object::Object { 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'a' (0x7ffc890eee60 'a') -0x7ffc890eedd0->Object::Object { 0x7ffc890eee60 } -0x7ffc890eed87->Object::Object { 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eed87->Object::~Object { 0x7ffc890eed87, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x7ffc890eedd0, 0x7ffc890eee60 } -Shifting token 'a' (0x7ffc890eedd0 'a') -0x5654f75f22e0->Object::Object { 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::Object { 0x5654f75f22e0, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::~Object { 0x5654f75f22e0, 0x7ffc890eed5f, 0x7ffc890eedd0 } -0x7ffc890eedd0->Object::~Object { 0x5654f75f22e0, 0x7ffc890eedd0 } -Entering state 1 -Stack now 0 1 -0x7ffc890eee80->Object::Object { 
0x5654f75f22e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5654f75f22e0 'a') --> $$ = nterm item (0x7ffc890eee80 'a') -0x5654f75f22e0->Object::~Object { 0x5654f75f22e0, 0x7ffc890eee80 } -0x5654f75f22e0->Object::Object { 0x7ffc890eee80 } -0x7ffc890eee38->Object::Object { 0x5654f75f22e0, 0x7ffc890eee80 } -0x7ffc890eee38->Object::~Object { 0x5654f75f22e0, 0x7ffc890eee38, 0x7ffc890eee80 } -0x7ffc890eee80->Object::~Object { 0x5654f75f22e0, 0x7ffc890eee80 } -Entering state 10 -Stack now 0 10 -Reading a token -0x7ffc890eed9f->Object::Object { 0x5654f75f22e0 } -0x7ffc890eee60->Object::Object { 0x5654f75f22e0, 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x5654f75f22e0, 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'a' (0x7ffc890eee60 'a') -0x7ffc890eedd0->Object::Object { 0x5654f75f22e0, 0x7ffc890eee60 } -0x7ffc890eed87->Object::Object { 0x5654f75f22e0, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eed87->Object::~Object { 0x5654f75f22e0, 0x7ffc890eed87, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x5654f75f22e0, 0x7ffc890eedd0, 0x7ffc890eee60 } -Shifting token 'a' (0x7ffc890eedd0 'a') -0x5654f75f2300->Object::Object { 0x5654f75f22e0, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eed5f, 0x7ffc890eedd0 } -0x7ffc890eedd0->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0 } -Entering state 1 -Stack now 0 10 1 -0x7ffc890eee80->Object::Object { 0x5654f75f22e0, 0x5654f75f2300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5654f75f2300 'a') --> $$ = nterm item (0x7ffc890eee80 'a') -0x5654f75f2300->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee80 } -0x5654f75f2300->Object::Object { 0x5654f75f22e0, 0x7ffc890eee80 } -0x7ffc890eee38->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee80 } -0x7ffc890eee38->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee38, 0x7ffc890eee80 } -0x7ffc890eee80->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee80 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0x7ffc890eed9f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300 } -0x7ffc890eee60->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'a' (0x7ffc890eee60 'a') -0x7ffc890eedd0->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee60 } -0x7ffc890eed87->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eed87->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eed87, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0, 0x7ffc890eee60 } -Shifting token 'a' (0x7ffc890eedd0 'a') -0x5654f75f2320->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eed5f, 0x7ffc890eedd0 } -0x7ffc890eedd0->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0 } -Entering state 1 -Stack now 0 10 10 1 -0x7ffc890eee80->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5654f75f2320 'a') --> $$ = nterm item (0x7ffc890eee80 'a') 
-0x5654f75f2320->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee80 } -0x5654f75f2320->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee80 } -0x7ffc890eee38->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee80 } -0x7ffc890eee38->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee38, 0x7ffc890eee80 } -0x7ffc890eee80->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee80 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x7ffc890eed9f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320 } -0x7ffc890eee60->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'a' (0x7ffc890eee60 'a') -0x7ffc890eedd0->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee60 } -0x7ffc890eed87->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eed87->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eed87, 0x7ffc890eedd0, 0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0, 0x7ffc890eee60 } -Shifting token 'a' (0x7ffc890eedd0 'a') -0x5654f75f2340->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eedd0 } -0x7ffc890eed5f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eed5f, 0x7ffc890eedd0 } -0x7ffc890eedd0->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eedd0 } -Entering state 1 -Stack now 0 10 10 10 1 -0x7ffc890eee80->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5654f75f2340 'a') --> $$ = nterm item (0x7ffc890eee80 'a') -0x5654f75f2340->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee80 } -0x5654f75f2340->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee80 } -0x7ffc890eee38->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee80 } -0x7ffc890eee38->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee38, 0x7ffc890eee80 } -0x7ffc890eee80->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee80 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x7ffc890eed9f->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340 } -0x7ffc890eee60->Object::Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eed9f } -0x7ffc890eed9f->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eed9f, 0x7ffc890eee60 } -Next token is token 'p' (0x7ffc890eee60 'p'Exception caught: cleaning lookahead and stack -0x5654f75f2340->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x5654f75f2340, 0x7ffc890eee60 } -0x5654f75f2320->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x5654f75f2320, 0x7ffc890eee60 } -0x5654f75f2300->Object::~Object { 0x5654f75f22e0, 0x5654f75f2300, 0x7ffc890eee60 } -0x5654f75f22e0->Object::~Object { 0x5654f75f22e0, 
0x7ffc890eee60 } -0x7ffc890eee60->Object::~Object { 0x7ffc890eee60 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -stderr: -stdout: -stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -./c++.at:1064: $PREPARSER ./input < in -stderr: -stderr: -error: invalid expression -caught error -error: invalid character -caught error -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ... -./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y -./c++.at:1363: $PREPARSER ./input aaaaE -./c++.at:1064: $PREPARSER ./input < in -stderr: -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1363: $PREPARSER ./input aaaaT -./c++.at:1064: $PREPARSER ./input < in -stderr: -stdout: -./c++.at:1555: ./check -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr --std=c++03 not supported -======== Testing with C++ standard flags: '' -stderr: -error: invalid character +718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ... +./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y stderr: -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:1363: $PREPARSER ./input aaaaR -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:855: $PREPARSER ./input -stderr: -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' stderr: -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' +./regression.at:1716: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt stdout: -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:92: $PREPARSER ./input stderr: +./regression.at:1716: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./regression.at:1716: grep 'syntax error,' stderr.txt ./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1716: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS +./regression.at:1716: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1716: "$PERL" 
-0777 -ne 'print s/\(realloc//g;' < stderr.txt +./glr-regression.at:355: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS +./regression.at:1719: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.cc input.y ./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stderr: stdout: ./cxx-type.at:412: $PREPARSER ./types test-input +./regression.at:1719: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: syntax error ./cxx-type.at:412: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: ./cxx-type.at:412: $PREPARSER ./types -p test-input -./c++.at:1555: ./check stderr: Starting parse Entering state 0 @@ -252305,11 +249861,9 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () --std=c++11 not supported -======== Testing with C++ standard flags: '' ./cxx-type.at:412: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -252876,16 +250430,94 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -705. cxx-type.at:409: ok -stderr: +stdout: +705. cxx-type.at:409: ./c++.at:566: $here/modern + ok +stdout: +Modern C++: 201703 +./c++.at:566: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: -./cxx-type.at:423: $PREPARSER ./types test-input +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:568: $here/modern +719. glr-regression.at:356: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr2.cc ... 
+./glr-regression.at:356: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y +stderr: +stdout: +stdout: +Modern C++: 201703 +./cxx-type.at:435: $PREPARSER ./types test-input +stderr: +./c++.at:568: $PREPARSER ./list stderr: syntax error -./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:423: $PREPARSER ./types -p test-input +stdout: +./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./cxx-type.at:417: $PREPARSER ./types test-input +stderr: +17.5: syntax error +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./cxx-type.at:435: $PREPARSER ./types -p test-input +./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: stderr: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./cxx-type.at:447: $PREPARSER ./types test-input Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -253204,6 +250836,19 @@ Reducing stack -1 by rule 4 (line 75): $1 = nterm decl () -> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Reducing stack -1 by rule 2 (line 65): $1 = nterm prog () $2 = nterm stmt () @@ -253345,319 +250990,6 @@ Reducing stack -1 by rule 4 (line 75): $1 = nterm decl () -> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Returning to deterministic operation. 
-Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 20 -Next token is token ID () -syntax error -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () -Entering state 3 -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token ')' () -Error: discarding token ')' () -Reading a token -Next token is token '=' () -Error: discarding token '=' () -Reading a token -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token ';' () -Entering state 3 -Next token is token ';' () -Shifting token ';' () -Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 15 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token '@' () -Shifting token '@' () -Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () -./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reducing stack 0 by rule 1 (line 64): --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 15 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () 
-Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Entering state 13 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 23 -Reducing stack 0 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Entering state 13 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 22 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 29 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 30 -Reducing stack 0 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 14 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 24 -Reading a token -Next token is token ';' () -Reducing stack 0 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ')' () -Stack 0 Entering state 18 -Next token is token ')' () -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. 
-Stack 0 Entering state 20 -Next token is token ')' () -Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () -Stack 0 now in state 27 -On stack 1, shifting token ')' () -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '+' () -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '+' () -Stack 1 dies. -Removing dead stacks. -On stack 0, shifting token '+' () -Stack 0 now in state 15 Reducing stack -1 by rule 7 (line 80): $1 = token ID () -> $$ = nterm expr () @@ -253667,231 +250999,6 @@ $3 = nterm expr () $4 = token ')' () -> $$ = nterm expr () -Returning to deterministic operation. -Entering state 15 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ')' () -Stack 0 Entering state 18 -Next token is token ')' () -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' () -Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () -Stack 0 now in state 27 -On stack 1, shifting token ')' () -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token ';' () -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token ';' () -On stack 0, shifting token ';' () -Stack 0 now in state 16 -On stack 1, shifting token ';' () -Stack 1 now in state 23 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME () -Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. 
-On stack 0, shifting token TYPENAME () -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ')' () -Stack 0 Entering state 18 -Next token is token ')' () -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' () -Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () -Stack 0 now in state 27 -On stack 1, shifting token ')' () -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '=' () -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '=' () -On stack 0, shifting token '=' () -Stack 0 now in state 14 -On stack 1, shifting token '=' () -Stack 1 now in state 22 -Stack 0 Entering state 14 -Reading a token -Next token is token ID () -Stack 1 Entering state 22 -Next token is token ID () -On stack 0, shifting token ID () -Stack 0 now in state 5 -On stack 1, shifting token ID () -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token '+' () -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token '+' () -On stack 0, shifting token '+' () -Stack 0 now in state 15 -On stack 1, shifting token '+' () -Stack 1 now in state 15 -Stack 0 Entering state 15 -Reading a token -Next token is token ID () -Stack 1 Entering state 15 -Next token is token ID () -On stack 0, shifting token ID () -Stack 0 now in state 5 -On stack 1, shifting token ID () -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. -Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token ';' () -Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. -Stack 0 Entering state 8 -Next token is token ';' () -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. -Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. 
-Stack 1 Entering state 29 -Next token is token ';' () -On stack 0, shifting token ';' () -Stack 0 now in state 16 -On stack 1, shifting token ';' () -Stack 1 now in state 30 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME () -Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME () -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () Reducing stack -1 by rule 7 (line 80): $1 = token ID () -> $$ = nterm expr () @@ -253903,715 +251010,127 @@ $2 = token '+' () $3 = nterm expr () -> $$ = nterm expr () -Reducing stack -1 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 20 -Next token is token ID () -syntax error -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () -Entering state 3 -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token ')' () -Error: discarding token ')' () -Reading a token -Next token is token '=' () -Error: discarding token '=' () -Reading a token -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token ';' () -Entering state 3 -Next token is token ';' () -Shifting token ';' () -Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 15 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): +Reducing stack -1 by rule 10 (line 84): $1 = nterm expr () - $2 = token '+' () 
+ $2 = token '=' () $3 = nterm expr () -> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): +Reducing stack -1 by rule 3 (line 74): $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token '@' () -Shifting token '@' () -Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () -707. cxx-type.at:420: ok -717. glr-regression.at:354: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.c ... - -./glr-regression.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.c glr-regr2a.y -stderr: -stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -./glr-regression.at:354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.c $LIBS -718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ... -./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y -stderr: -stderr: -stdout: -stdout: -======== Testing with C++ standard flags: '' -./cxx-type.at:429: $PREPARSER ./types test-input -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -17.5: syntax error -./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:429: $PREPARSER ./types -p test-input -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -Starting parse -Entering state 0 -Reducing stack 0 by rule 1 (line 71): --> $$ = nterm prog (1.1: ) -Entering state 1 -Reading a token -Next token is token ID (3.0: ) -Shifting token ID (3.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.0: ) --> $$ = nterm expr (3.0: ) -Entering state 8 -Reading a token -Next token is token '+' (3.2: ) -Shifting token '+' (3.2: ) -Entering state 15 -Reading a token -Next token is token ID (3.4: ) -Shifting token ID (3.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.4: ) --> $$ = nterm expr (3.4: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (3.0: ) - $2 = token '+' (3.2: ) - $3 = nterm expr (3.4: ) --> $$ = nterm expr (3.0-4: ) -Entering state 8 -Reading a token -Next token is token ';' (3.5: ) -Shifting token ';' (3.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (3.0-4: ) - $2 = token ';' (3.5: ) --> $$ = nterm stmt (3.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1: ) - $2 = nterm stmt (3.0-5: ) --> $$ = nterm prog (1.1-3.5: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (5.0: ) -Shifting token TYPENAME (5.0: ) -Entering state 4 -Reading a token -Next token is token ID (5.2: ) -Shifting token ID (5.2: ) -Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (5.2: ) --> $$ = nterm declarator (5.2: ) -Entering state 13 -Reading a token -Next token is token ';' (5.3: ) -Shifting token ';' (5.3: ) -Entering state 23 -Reducing 
stack 0 by rule 11 (line 97): - $1 = token TYPENAME (5.0: ) - $2 = nterm declarator (5.2: ) - $3 = token ';' (5.3: ) --> $$ = nterm decl (5.0-3: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (5.0-3: ) --> $$ = nterm stmt (5.0-3: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-3.5: ) - $2 = nterm stmt (5.0-3: ) --> $$ = nterm prog (1.1-5.3: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (7.0: ) -Shifting token TYPENAME (7.0: ) -Entering state 4 -Reading a token -Next token is token ID (7.2: ) -Shifting token ID (7.2: ) -Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (7.2: ) --> $$ = nterm declarator (7.2: ) -Entering state 13 -Reading a token -Next token is token '=' (7.4: ) -Shifting token '=' (7.4: ) -Entering state 22 -Reading a token -Next token is token ID (7.6: ) -Shifting token ID (7.6: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (7.6: ) --> $$ = nterm expr (7.6: ) -Entering state 29 -Reading a token -Next token is token ';' (7.7: ) -Shifting token ';' (7.7: ) -Entering state 30 -Reducing stack 0 by rule 12 (line 99): - $1 = token TYPENAME (7.0: ) - $2 = nterm declarator (7.2: ) - $3 = token '=' (7.4: ) - $4 = nterm expr (7.6: ) - $5 = token ';' (7.7: ) --> $$ = nterm decl (7.0-7: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (7.0-7: ) --> $$ = nterm stmt (7.0-7: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-5.3: ) - $2 = nterm stmt (7.0-7: ) --> $$ = nterm prog (1.1-7.7: ) -Entering state 1 -Reading a token -Next token is token ID (9.0: ) -Shifting token ID (9.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.0: ) --> $$ = nterm expr (9.0: ) -Entering state 8 -Reading a token -Next token is token '=' (9.2: ) -Shifting token '=' (9.2: ) -Entering state 14 -Reading a token -Next token is token ID (9.4: ) -Shifting token ID (9.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.4: ) --> $$ = nterm expr (9.4: ) -Entering state 24 -Reading a token -Next token is token ';' (9.5: ) -Reducing stack 0 by rule 10 (line 94): - $1 = nterm expr (9.0: ) - $2 = token '=' (9.2: ) - $3 = nterm expr (9.4: ) --> $$ = nterm expr (9.0-4: ) -Entering state 8 -Next token is token ';' (9.5: ) -Shifting token ';' (9.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (9.0-4: ) - $2 = token ';' (9.5: ) --> $$ = nterm stmt (9.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-7.7: ) - $2 = nterm stmt (9.0-5: ) --> $$ = nterm prog (1.1-9.5: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (11.0: ) -Shifting token TYPENAME (11.0: ) -Entering state 4 -Reading a token -Next token is token '(' (11.2: ) -Shifting token '(' (11.2: ) -Entering state 12 -Reading a token -Next token is token ID (11.3: ) -Shifting token ID (11.3: ) -Entering state 18 -Reading a token -Next token is token ')' (11.4: ) -Stack 0 Entering state 18 -Next token is token ')' (11.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. 
-Stack 0 Entering state 20 -Next token is token ')' (11.4: ) -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -On stack 0, shifting token ')' (11.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (11.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '+' (11.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '+' (11.6: ) -Stack 1 dies. -Removing dead stacks. -On stack 0, shifting token '+' (11.6: ) -Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (11.3: ) --> $$ = nterm expr (11.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (11.0: ) - $2 = token '(' (11.2: ) - $3 = nterm expr (11.3: ) - $4 = token ')' (11.4: ) --> $$ = nterm expr (11.0-4: ) -Returning to deterministic operation. -Entering state 15 -Reading a token -Next token is token ID (11.8: ) -Shifting token ID (11.8: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (11.8: ) --> $$ = nterm expr (11.8: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (11.0-4: ) - $2 = token '+' (11.6: ) - $3 = nterm expr (11.8: ) --> $$ = nterm expr (11.0-8: ) -Entering state 8 -Reading a token -Next token is token ';' (11.9: ) -Shifting token ';' (11.9: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (11.0-8: ) - $2 = token ';' (11.9: ) --> $$ = nterm stmt (11.0-9: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-9.5: ) - $2 = nterm stmt (11.0-9: ) --> $$ = nterm prog (1.1-11.9: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (13.0: ) -Shifting token TYPENAME (13.0: ) -Entering state 4 -Reading a token -Next token is token '(' (13.2: ) -Shifting token '(' (13.2: ) -Entering state 12 -Reading a token -Next token is token ID (13.3: ) -Shifting token ID (13.3: ) -Entering state 18 -Reading a token -Next token is token ')' (13.4: ) -Stack 0 Entering state 18 -Next token is token ')' (13.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (13.4: ) -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -On stack 0, shifting token ')' (13.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (13.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token ';' (13.5: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token ';' (13.5: ) -On stack 0, shifting token ';' (13.5: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (13.5: ) -Stack 1 now in state 23 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (15.0: ) -Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. 
-Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME (15.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (13.3: ) --> $$ = nterm declarator (13.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (13.2: ) - $2 = nterm declarator (13.3: ) - $3 = token ')' (13.4: ) --> $$ = nterm declarator (13.2-4: ) -Reducing stack -1 by rule 11 (line 97): - $1 = token TYPENAME (13.0: ) - $2 = nterm declarator (13.2-4: ) - $3 = token ';' (13.5: ) --> $$ = nterm decl (13.0-5: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (13.0-5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-11.9: ) - $2 = nterm stmt (13.0-5: ) --> $$ = nterm prog (1.1-13.5: ) -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' (15.2: ) -Shifting token '(' (15.2: ) -Entering state 12 -Reading a token -Next token is token ID (15.3: ) -Shifting token ID (15.3: ) -Entering state 18 -Reading a token -Next token is token ')' (15.4: ) -Stack 0 Entering state 18 -Next token is token ')' (15.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (15.4: ) -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -On stack 0, shifting token ')' (15.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (15.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '=' (15.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '=' (15.6: ) -On stack 0, shifting token '=' (15.6: ) -Stack 0 now in state 14 -On stack 1, shifting token '=' (15.6: ) -Stack 1 now in state 22 -Stack 0 Entering state 14 -Reading a token -Next token is token ID (15.8: ) -Stack 1 Entering state 22 -Next token is token ID (15.8: ) -On stack 0, shifting token ID (15.8: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.8: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token '+' (15.10: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token '+' (15.10: ) -On stack 0, shifting token '+' (15.10: ) -Stack 0 now in state 15 -On stack 1, shifting token '+' (15.10: ) -Stack 1 now in state 15 -Stack 0 Entering state 15 -Reading a token -Next token is token ID (15.12: ) -Stack 1 Entering state 15 -Next token is token ID (15.12: ) -On stack 0, shifting token ID (15.12: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.12: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. -Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. 
-Stack 0 Entering state 24 -Reading a token -Next token is token ';' (15.13: ) -Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. -Stack 0 Entering state 8 -Next token is token ';' (15.13: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. -Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token ';' (15.13: ) -On stack 0, shifting token ';' (15.13: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (15.13: ) -Stack 1 now in state 30 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (17.0: ) -Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME (17.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (15.3: ) --> $$ = nterm declarator (15.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (15.2: ) - $2 = nterm declarator (15.3: ) - $3 = token ')' (15.4: ) --> $$ = nterm declarator (15.2-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 12 (line 99): - $1 = token TYPENAME (15.0: ) - $2 = nterm declarator (15.2-4: ) - $3 = token '=' (15.6: ) - $4 = nterm expr (15.8-12: ) - $5 = token ';' (15.13: ) --> $$ = nterm decl (15.0-13: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (15.0-13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-13.5: ) - $2 = nterm stmt (15.0-13: ) --> $$ = nterm prog (1.1-15.13: ) + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Returning to deterministic operation. 
Entering state 4 Reading a token -Next token is token '(' (17.2: ) -Shifting token '(' (17.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (17.3: ) -Shifting token ID (17.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ID (17.5: ) -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (17.3: ) --> $$ = nterm expr (17.3: ) +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 20 -Next token is token ID (17.5: ) -17.5: syntax error -Error: popping nterm expr (17.3: ) -Error: popping token '(' (17.2: ) -Error: popping token TYPENAME (17.0: ) -Shifting token error (17.0-5: ) +Next token is token ID () +syntax error +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () Entering state 3 -Next token is token ID (17.5: ) -Error: discarding token ID (17.5: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token ')' (17.6: ) -Error: discarding token ')' (17.6: ) +Next token is token ')' () +Error: discarding token ')' () Reading a token -Next token is token '=' (17.8: ) -Error: discarding token '=' (17.8: ) +Next token is token '=' () +Error: discarding token '=' () Reading a token -Next token is token ID (17.10: ) -Error: discarding token ID (17.10: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token '+' (17.12: ) -Error: discarding token '+' (17.12: ) +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token ID (17.14: ) -Error: discarding token ID (17.14: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token ';' (17.15: ) +Next token is token ';' () Entering state 3 -Next token is token ';' (17.15: ) -Shifting token ';' (17.15: ) +Next token is token ';' () +Shifting token ';' () Entering state 10 -Reducing stack 0 by rule 5 (line 86): - $1 = token error (17.0-14: ) - $2 = token ';' (17.15: ) --> $$ = nterm stmt (17.0-15: ) +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-15.13: ) - $2 = nterm stmt (17.0-15: ) --> $$ = nterm prog (1.1-17.15: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (19.0: ) -Shifting token ID (19.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.0: ) --> $$ = nterm expr (19.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '+' (19.2: ) -Shifting token '+' (19.2: ) +Next token is token '+' () +Shifting token '+' () Entering state 15 Reading a token -Next token is token ID (19.4: ) -Shifting token ID (19.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.4: ) --> $$ = nterm expr (19.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (19.0: ) - $2 = token '+' (19.2: ) - $3 = nterm expr (19.4: ) --> $$ = nterm expr (19.0-4: ) +Reducing stack 0 by rule 9 (line 
83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (19.5: ) -Shifting token ';' (19.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (19.0-4: ) - $2 = token ';' (19.5: ) --> $$ = nterm stmt (19.0-5: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-17.15: ) - $2 = nterm stmt (19.0-5: ) --> $$ = nterm prog (1.1-19.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token '@' (21.0: ) -Shifting token '@' (21.0: ) +Next token is token '@' () +Shifting token '@' () Entering state 6 -Reducing stack 0 by rule 6 (line 87): - $1 = token '@' (21.0: ) -Cleanup: popping nterm prog (1.1-19.5: ) -./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () +stderr: +./cxx-type.at:417: $PREPARSER ./types -p test-input +syntax error stderr: +./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -255178,184 +251697,9 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) -./glr-regression.at:355: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS -708. cxx-type.at:426: ok - -719. glr-regression.at:356: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr2.cc ... -./glr-regression.at:356: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y -stderr: -stdout: -stderr: -./c++.at:849: $PREPARSER ./input -stderr: -stdout: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:1361: $PREPARSER ./input aaaas -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -======== Testing with C++ standard flags: '' -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./glr-regression.at:356: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1361: $PREPARSER ./input aaaap -stdout: -./c++.at:1502: $PREPARSER ./parser -stderr: -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap -stderr: -690. 
c++.at:1422: ok -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55da8e7bbf40->Object::Object { } -Next token is token 'a' (0x55da8e7bbf40 'a') -Shifting token 'a' (0x55da8e7bbf40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55da8e7bbf40 'a') --> $$ = nterm item (0x55da8e7bbf40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x55da8e7bbf90->Object::Object { 0x55da8e7bbf40 } -Next token is token 'a' (0x55da8e7bbf90 'a') -Shifting token 'a' (0x55da8e7bbf90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55da8e7bbf90 'a') --> $$ = nterm item (0x55da8e7bbf90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x55da8e7bbfe0->Object::Object { 0x55da8e7bbf40, 0x55da8e7bbf90 } -Next token is token 'a' (0x55da8e7bbfe0 'a') -Shifting token 'a' (0x55da8e7bbfe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55da8e7bbfe0 'a') --> $$ = nterm item (0x55da8e7bbfe0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x55da8e7bc030->Object::Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0 } -Next token is token 'a' (0x55da8e7bc030 'a') -Shifting token 'a' (0x55da8e7bc030 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55da8e7bc030 'a') --> $$ = nterm item (0x55da8e7bc030 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x55da8e7bc080->Object::Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0, 0x55da8e7bc030 } -Next token is token 'p' (0x55da8e7bc080 'p'Exception caught: cleaning lookahead and stack -0x55da8e7bc080->Object::~Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0, 0x55da8e7bc030, 0x55da8e7bc080 } -0x55da8e7bc030->Object::~Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0, 0x55da8e7bc030 } -0x55da8e7bbfe0->Object::~Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0 } -0x55da8e7bbf90->Object::~Object { 0x55da8e7bbf40, 0x55da8e7bbf90 } -0x55da8e7bbf40->Object::~Object { 0x55da8e7bbf40 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55da8e7bbf40->Object::Object { } -Next token is token 'a' (0x55da8e7bbf40 'a') -Shifting token 'a' (0x55da8e7bbf40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55da8e7bbf40 'a') --> $$ = nterm item (0x55da8e7bbf40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x55da8e7bbf90->Object::Object { 0x55da8e7bbf40 } -Next token is token 'a' (0x55da8e7bbf90 'a') -Shifting token 'a' (0x55da8e7bbf90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55da8e7bbf90 'a') --> $$ = nterm item (0x55da8e7bbf90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x55da8e7bbfe0->Object::Object { 0x55da8e7bbf40, 0x55da8e7bbf90 } -Next token is token 'a' (0x55da8e7bbfe0 'a') -Shifting token 'a' (0x55da8e7bbfe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55da8e7bbfe0 'a') --> $$ = nterm item (0x55da8e7bbfe0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x55da8e7bc030->Object::Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0 } -Next token is token 'a' (0x55da8e7bc030 'a') -Shifting token 'a' (0x55da8e7bc030 'a') -Entering state 1 -Stack now 0 10 
10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55da8e7bc030 'a') --> $$ = nterm item (0x55da8e7bc030 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x55da8e7bc080->Object::Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0, 0x55da8e7bc030 } -Next token is token 'p' (0x55da8e7bc080 'p'Exception caught: cleaning lookahead and stack -0x55da8e7bc080->Object::~Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0, 0x55da8e7bc030, 0x55da8e7bc080 } -0x55da8e7bc030->Object::~Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0, 0x55da8e7bc030 } -0x55da8e7bbfe0->Object::~Object { 0x55da8e7bbf40, 0x55da8e7bbf90, 0x55da8e7bbfe0 } -0x55da8e7bbf90->Object::~Object { 0x55da8e7bbf40, 0x55da8e7bbf90 } -0x55da8e7bbf40->Object::~Object { 0x55da8e7bbf40 } -exception caught: printer -end { } - -./c++.at:1361: grep '^exception caught: printer$' stderr -stderr: -stdout: -./cxx-type.at:435: $PREPARSER ./types test-input -stdout: -exception caught: printer -stderr: -./c++.at:1361: $PREPARSER ./input aaaae -syntax error -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 -./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: syntax error -./cxx-type.at:435: $PREPARSER ./types -p test-input -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -255673,8 +252017,7 @@ -> $$ = nterm decl () Reducing stack -1 by rule 4 (line 75): $1 = nterm decl () --stderr: -> $$ = nterm stmt () +-> $$ = nterm stmt () Reducing stack -1 by rule 7 (line 80): $1 = token ID () -> $$ = nterm expr () @@ -255965,637 +252308,590 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -stdout: -./c++.at:1361: $PREPARSER ./input aaaaE -./cxx-type.at:447: $PREPARSER ./types test-input -720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ... -./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y -stderr: -syntax error stderr: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +709. 
cxx-type.at:432: ok +stdout: +./cxx-type.at:441: $PREPARSER ./types test-input +stdout: stderr: +./cxx-type.at:423: $PREPARSER ./types test-input +./glr-regression.at:356: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 64): --> $$ = nterm prog () +Reducing stack 0 by rule 1 (line 71): +-> $$ = nterm prog (1.1: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.0: ) +Shifting token ID (3.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.0: ) +-> $$ = nterm expr (3.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (3.2: ) +Shifting token '+' (3.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.4: ) +Shifting token ID (3.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.4: ) +-> $$ = nterm expr (3.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (3.0: ) + $2 = token '+' (3.2: ) + $3 = nterm expr (3.4: ) +-> $$ = nterm expr (3.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (3.5: ) +Shifting token ';' (3.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (3.0-4: ) + $2 = token ';' (3.5: ) +-> $$ = nterm stmt (3.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1: ) + $2 = nterm stmt (3.0-5: ) +-> $$ = nterm prog (1.1-3.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (5.0: ) +Shifting token TYPENAME (5.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (5.2: ) +Shifting token ID (5.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (5.2: ) +-> $$ = nterm declarator (5.2: ) Entering state 13 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (5.3: ) +Shifting token ';' (5.3: ) Entering state 23 -Reducing stack 0 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 11 (line 97): + $1 = token TYPENAME (5.0: ) + $2 = nterm declarator (5.2: ) + $3 = token ';' (5.3: ) +-> $$ = nterm decl (5.0-3: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (5.0-3: ) +-> $$ = nterm stmt (5.0-3: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-3.5: ) + $2 = nterm stmt (5.0-3: ) +-> $$ = nterm prog 
(1.1-5.3: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (7.0: ) +Shifting token TYPENAME (7.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.2: ) +Shifting token ID (7.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (7.2: ) +-> $$ = nterm declarator (7.2: ) Entering state 13 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.4: ) +Shifting token '=' (7.4: ) Entering state 22 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.6: ) +Shifting token ID (7.6: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (7.6: ) +-> $$ = nterm expr (7.6: ) Entering state 29 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (7.7: ) +Shifting token ';' (7.7: ) Entering state 30 -Reducing stack 0 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 12 (line 99): + $1 = token TYPENAME (7.0: ) + $2 = nterm declarator (7.2: ) + $3 = token '=' (7.4: ) + $4 = nterm expr (7.6: ) + $5 = token ';' (7.7: ) +-> $$ = nterm decl (7.0-7: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (7.0-7: ) +-> $$ = nterm stmt (7.0-7: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-5.3: ) + $2 = nterm stmt (7.0-7: ) +-> $$ = nterm prog (1.1-7.7: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.0: ) +Shifting token ID (9.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.0: ) +-> $$ = nterm expr (9.0: ) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.2: ) +Shifting token '=' (9.2: ) Entering state 14 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.4: ) +Shifting token ID (9.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.4: ) +-> $$ = nterm expr (9.4: ) Entering state 24 Reading a token -Next token is token ';' () -Reducing stack 0 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () +Next token is token ';' (9.5: ) +Reducing stack 0 by rule 10 (line 94): + $1 = nterm expr (9.0: ) + $2 = token '=' (9.2: ) + $3 = nterm expr (9.4: ) +-> $$ = nterm expr (9.0-4: ) Entering state 8 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (9.5: ) +Shifting token ';' (9.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (9.0-4: ) + $2 = token ';' (9.5: ) +-> $$ = nterm stmt (9.0-5: ) Entering state 
7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-7.7: ) + $2 = nterm stmt (9.0-5: ) +-> $$ = nterm prog (1.1-9.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (11.0: ) +Shifting token TYPENAME (11.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (11.2: ) +Shifting token '(' (11.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.3: ) +Shifting token ID (11.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (11.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (11.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (11.4: ) +On stack 0, shifting token ')' (11.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (11.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 dies. Removing dead stacks. -On stack 0, shifting token '+' () +On stack 0, shifting token '+' (11.6: ) Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (11.3: ) +-> $$ = nterm expr (11.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (11.0: ) + $2 = token '(' (11.2: ) + $3 = nterm expr (11.3: ) + $4 = token ')' (11.4: ) +-> $$ = nterm expr (11.0-4: ) Returning to deterministic operation. 
Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.8: ) +Shifting token ID (11.8: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (11.8: ) +-> $$ = nterm expr (11.8: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (11.0-4: ) + $2 = token '+' (11.6: ) + $3 = nterm expr (11.8: ) +-> $$ = nterm expr (11.0-8: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (11.9: ) +Shifting token ';' (11.9: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (11.0-8: ) + $2 = token ';' (11.9: ) +-> $$ = nterm stmt (11.0-9: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-9.5: ) + $2 = nterm stmt (11.0-9: ) +-> $$ = nterm prog (1.1-11.9: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (13.0: ) +Shifting token TYPENAME (13.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.2: ) +Shifting token '(' (13.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (13.3: ) +Shifting token ID (13.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (13.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (13.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (13.4: ) +On stack 0, shifting token ')' (13.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (13.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token ';' () +Next token is token ';' (13.5: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (13.5: ) +On stack 0, shifting token ';' (13.5: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (13.5: ) Stack 1 now in state 23 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. 
+Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (15.0: ) Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (15.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (13.3: ) +-> $$ = nterm declarator (13.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (13.2: ) + $2 = nterm declarator (13.3: ) + $3 = token ')' (13.4: ) +-> $$ = nterm declarator (13.2-4: ) +Reducing stack -1 by rule 11 (line 97): + $1 = token TYPENAME (13.0: ) + $2 = nterm declarator (13.2-4: ) + $3 = token ';' (13.5: ) +-> $$ = nterm decl (13.0-5: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (13.0-5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-11.9: ) + $2 = nterm stmt (13.0-5: ) +-> $$ = nterm prog (1.1-13.5: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (15.2: ) +Shifting token '(' (15.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (15.3: ) +Shifting token ID (15.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (15.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (15.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. 
Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (15.4: ) +On stack 0, shifting token ')' (15.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (15.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '=' () +Next token is token '=' (15.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '=' () -On stack 0, shifting token '=' () +Next token is token '=' (15.6: ) +On stack 0, shifting token '=' (15.6: ) Stack 0 now in state 14 -On stack 1, shifting token '=' () +On stack 1, shifting token '=' (15.6: ) Stack 1 now in state 22 Stack 0 Entering state 14 Reading a token -Next token is token ID () +Next token is token ID (15.8: ) Stack 1 Entering state 22 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.8: ) +On stack 0, shifting token ID (15.8: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.8: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token '+' () +Next token is token '+' (15.10: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token '+' () -On stack 0, shifting token '+' () +Next token is token '+' (15.10: ) +On stack 0, shifting token '+' (15.10: ) Stack 0 now in state 15 -On stack 1, shifting token '+' () +On stack 1, shifting token '+' (15.10: ) Stack 1 now in state 15 Stack 0 Entering state 15 Reading a token -Next token is token ID () +Next token is token ID (15.12: ) Stack 1 Entering state 15 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.12: ) +On stack 0, shifting token ID (15.12: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.12: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token ';' () -Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Next token is token ';' (15.13: ) +Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. Stack 0 Entering state 8 -Next token is token ';' () +Next token is token ';' (15.13: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. 
+Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (15.13: ) +On stack 0, shifting token ';' (15.13: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (15.13: ) Stack 1 now in state 30 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (17.0: ) Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (17.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (15.3: ) +-> $$ = nterm declarator (15.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (15.2: ) + $2 = nterm declarator (15.3: ) + $3 = token ')' (15.4: ) +-> $$ = nterm declarator (15.2-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr 
(15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 12 (line 99): + $1 = token TYPENAME (15.0: ) + $2 = nterm declarator (15.2-4: ) + $3 = token '=' (15.6: ) + $4 = nterm expr (15.8-12: ) + $5 = token ';' (15.13: ) +-> $$ = nterm decl (15.0-13: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (15.0-13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-13.5: ) + $2 = nterm stmt (15.0-13: ) +-> $$ = nterm prog (1.1-15.13: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (17.2: ) +Shifting token '(' (17.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (17.3: ) +Shifting token ID (17.3: ) Entering state 18 Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Next token is token ID (17.5: ) +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (17.3: ) +-> $$ = nterm expr (17.3: ) Entering state 20 -Next token is token ID () -syntax error -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () +Next token is token ID (17.5: ) +17.5: syntax error +Error: popping nterm expr (17.3: ) +Error: popping token '(' (17.2: ) +Error: popping token TYPENAME (17.0: ) +Shifting token error (17.0-5: ) Entering state 3 -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.5: ) +Error: discarding token ID (17.5: ) Reading a token -Next token is token ')' () -Error: discarding token ')' () +Next token is token ')' (17.6: ) +Error: discarding token ')' (17.6: ) Reading a token -Next token is token '=' () -Error: discarding token '=' () +Next token is token '=' (17.8: ) +Error: discarding token '=' (17.8: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.10: ) +Error: discarding token ID (17.10: ) Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '+' (17.12: ) +Error: discarding token '+' (17.12: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.14: ) +Error: discarding token ID (17.14: ) Reading a token -Next token is token ';' () +Next token is token ';' (17.15: ) Entering state 3 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (17.15: ) +Shifting token ';' (17.15: ) Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 5 (line 86): + $1 = token error (17.0-14: ) + $2 = token ';' (17.15: ) +-> $$ = nterm stmt (17.0-15: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-15.13: ) + $2 = nterm stmt (17.0-15: ) +-> $$ = nterm prog (1.1-17.15: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.0: ) +Shifting token ID (19.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.0: ) +-> $$ = nterm expr (19.0: ) Entering state 8 Reading 
a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (19.2: ) +Shifting token '+' (19.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.4: ) +Shifting token ID (19.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.4: ) +-> $$ = nterm expr (19.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (19.0: ) + $2 = token '+' (19.2: ) + $3 = nterm expr (19.4: ) +-> $$ = nterm expr (19.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (19.5: ) +Shifting token ';' (19.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (19.0-4: ) + $2 = token ';' (19.5: ) +-> $$ = nterm stmt (19.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-17.15: ) + $2 = nterm stmt (19.0-5: ) +-> $$ = nterm prog (1.1-19.5: ) Entering state 1 Reading a token -Next token is token '@' () -Shifting token '@' () +Next token is token '@' (21.0: ) +Shifting token '@' (21.0: ) Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () -stdout: -./cxx-type.at:417: $PREPARSER ./types test-input -709. cxx-type.at:432: ok -./cxx-type.at:447: $PREPARSER ./types -p test-input +Reducing stack 0 by rule 6 (line 87): + $1 = token '@' (21.0: ) +Cleanup: popping nterm prog (1.1-19.5: ) stderr: -./c++.at:1361: $PREPARSER ./input aaaaT -17.5: syntax error +706. cxx-type.at:415: ./cxx-type.at:447: $PREPARSER ./types -p test-input stderr: -./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +17.5: syntax error +syntax error + ok +./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -257205,13 +253501,11 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () + ./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:417: $PREPARSER ./types -p test-input +./cxx-type.at:441: $PREPARSER ./types -p test-input +./cxx-type.at:423: $PREPARSER ./types -p test-input stderr: -./c++.at:1361: $PREPARSER ./input aaaaR - -./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -257820,8 +254114,9 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () +711. cxx-type.at:444: ok stderr: -stderr: + Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -258113,9 +254408,6 @@ Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -711. 
cxx-type.at:444: ok -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS Next token is token TYPENAME (15.0: ) Stack 1 Entering state 23 Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. @@ -258143,6 +254435,19 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (13.0-5: ) -> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (13.3: ) +-> $$ = nterm expr (13.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (13.0: ) + $2 = token '(' (13.2: ) + $3 = nterm expr (13.3: ) + $4 = token ')' (13.4: ) +-> $$ = nterm expr (13.0-4: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (13.0-4: ) + $2 = token ';' (13.5: ) +-> $$ = nterm stmt (13.0-5: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-11.9: ) $2 = nterm stmt (13.0-5: ) @@ -258284,6 +254589,35 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (15.0-13: ) -> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.3: ) +-> $$ = nterm expr (15.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (15.0: ) + $2 = token '(' (15.2: ) + $3 = nterm expr (15.3: ) + $4 = token ')' (15.4: ) +-> $$ = nterm expr (15.0-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 10 (line 94): + $1 = nterm expr (15.0-4: ) + $2 = token '=' (15.6: ) + $3 = nterm expr (15.8-12: ) +-> $$ = nterm expr (15.0-12: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (15.0-12: ) + $2 = token ';' (15.13: ) +-> $$ = nterm stmt (15.0-13: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-13.5: ) $2 = nterm stmt (15.0-13: ) @@ -258391,9 +254725,575 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) -./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +Starting parse +Entering state 0 +Reducing stack 0 by rule 1 (line 64): +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a 
token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 23 +Reducing stack 0 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 22 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 29 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 30 +Reducing stack 0 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 14 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 24 +Reading a token +Next token is token ';' () +Reducing stack 0 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. 
+Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '+' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '+' () +Stack 1 dies. +Removing dead stacks. +On stack 0, shifting token '+' () +Stack 0 now in state 15 +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Returning to deterministic operation. +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token ';' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 23 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 23 +Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. 
+On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '=' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '=' () +On stack 0, shifting token '=' () +Stack 0 now in state 14 +On stack 1, shifting token '=' () +Stack 1 now in state 22 +Stack 0 Entering state 14 +Reading a token +Next token is token ID () +Stack 1 Entering state 22 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token '+' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token '+' () +On stack 0, shifting token '+' () +Stack 0 now in state 15 +On stack 1, shifting token '+' () +Stack 1 now in state 15 +Stack 0 Entering state 15 +Reading a token +Next token is token ID () +Stack 1 Entering state 15 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Stack 0 Entering state 25 +Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token ';' () +Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Stack 0 Entering state 8 +Next token is token ';' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Stack 1 Entering state 25 +Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. 
+Stack 1 Entering state 29 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 30 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 30 +Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. +On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 20 +Next token is token ID () +syntax error +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () +Entering state 3 +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ')' () +Error: discarding token ')' () +Reading a token +Next token is token '=' () +Error: discarding token '=' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ';' () +Entering state 3 +Next token is token ';' () +Shifting token ';' () +Entering state 10 +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 
25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token '@' () +Shifting token '@' () +Entering state 6 +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () +./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -258713,6 +255613,19 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (13.0-5: ) -> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (13.3: ) +-> $$ = nterm expr (13.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (13.0: ) + $2 = token '(' (13.2: ) + $3 = nterm expr (13.3: ) + $4 = token ')' (13.4: ) +-> $$ = nterm expr (13.0-4: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (13.0-4: ) + $2 = token ';' (13.5: ) +-> $$ = nterm stmt (13.0-5: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-11.9: ) $2 = nterm stmt (13.0-5: ) @@ -258854,6 +255767,35 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (15.0-13: ) -> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.3: ) +-> $$ = nterm expr (15.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (15.0: ) + $2 = token '(' (15.2: ) + $3 = nterm expr (15.3: ) + $4 = token ')' (15.4: ) +-> $$ = nterm expr (15.0-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 10 (line 94): + $1 = nterm expr (15.0-4: ) + $2 = token '=' (15.6: ) + $3 = nterm expr (15.8-12: ) +-> $$ = nterm expr (15.0-12: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (15.0-12: ) + $2 = token ';' (15.13: ) +-> $$ = nterm stmt (15.0-13: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-13.5: ) $2 = nterm stmt (15.0-13: ) @@ -258961,47 +255903,11 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) - -706. cxx-type.at:415: ok -721. glr-regression.at:489: testing Improper merging of GLR delayed action sets: glr.cc ... -./glr-regression.at:489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y - -722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ... -./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y -stderr: -stdout: -./c++.at:941: $PREPARSER ./input -723. glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ... 
-./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS -./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS -./glr-regression.at:489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS -stderr: -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -./types.at:139: $PREPARSER ./test +710. cxx-type.at:438: ok +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -441. types.at:139: ok stderr: stdout: -./cxx-type.at:458: $PREPARSER ./types test-input -stderr: -syntax error, unexpected ID, expecting '=' or '+' or ')' - -./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:458: $PREPARSER ./types -p test-input -stderr: Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -259320,19 +256226,6 @@ Reducing stack -1 by rule 4 (line 75): $1 = nterm decl () -> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () Reducing stack -1 by rule 2 (line 65): $1 = nterm prog () $2 = nterm stmt () @@ -259474,35 +256367,6 @@ Reducing stack -1 by rule 4 (line 75): $1 = nterm decl () -> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () Reducing stack -1 by rule 2 (line 65): $1 = nterm prog () $2 = nterm stmt () @@ -259524,7 +256388,7 @@ -> $$ = nterm expr () Entering state 20 Next token is token ID () -syntax error, unexpected ID, expecting '=' or '+' or ')' +syntax error Error: popping nterm expr () Error: popping token '(' () Error: popping token TYPENAME () @@ -259610,641 +256474,708 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./c++.at:569: $here/modern +707. 
cxx-type.at:420: ok + +stderr: +stdout: +stdout: +stderr: +stdout: +Modern C++: 201703 +./c++.at:569: $PREPARSER ./list +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./cxx-type.at:452: $PREPARSER ./types test-input +stderr: +stderr: +17.5: syntax error +stderr: + +stdout: +./cxx-type.at:429: $PREPARSER ./types test-input +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +17.5: syntax error +stdout: +./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +721. glr-regression.at:489: testing Improper merging of GLR delayed action sets: glr.cc ... +./c++.at:570: $here/modern +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ... +./cxx-type.at:452: $PREPARSER ./types -p test-input +stderr: +./glr-regression.at:489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y +./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y +stdout: +Modern C++: 201703 +stdout: +./c++.at:570: $PREPARSER ./list +./glr-regression.at:205: $PREPARSER ./glr-regr1 BPBPB +stderr: +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ... 
+./cxx-type.at:429: $PREPARSER ./types -p test-input +stderr: +./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 64): --> $$ = nterm prog () +Reducing stack 0 by rule 1 (line 71): +-> $$ = nterm prog (1.1: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.0: ) +Shifting token ID (3.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.0: ) +-> $$ = nterm expr (3.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (3.2: ) +Shifting token '+' (3.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.4: ) +Shifting token ID (3.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.4: ) +-> $$ = nterm expr (3.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (3.0: ) + $2 = token '+' (3.2: ) + $3 = nterm expr (3.4: ) +-> $$ = nterm expr (3.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (3.5: ) +Shifting token ';' (3.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (3.0-4: ) + $2 = token ';' (3.5: ) +-> $$ = nterm stmt (3.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1: ) + $2 = nterm stmt (3.0-5: ) +-> $$ = nterm prog (1.1-3.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (5.0: ) +Shifting token TYPENAME (5.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (5.2: ) +Shifting token ID (5.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (5.2: ) +-> $$ = nterm declarator (5.2: ) Entering state 13 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (5.3: ) +Shifting token ';' (5.3: ) Entering state 23 -Reducing stack 0 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 11 (line 97): + $1 = token TYPENAME (5.0: ) + $2 = nterm declarator (5.2: ) + $3 = token ';' (5.3: ) +-> $$ = nterm decl (5.0-3: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (5.0-3: ) +-> $$ = nterm stmt (5.0-3: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () 
--> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-3.5: ) + $2 = nterm stmt (5.0-3: ) +-> $$ = nterm prog (1.1-5.3: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (7.0: ) +Shifting token TYPENAME (7.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.2: ) +Shifting token ID (7.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (7.2: ) +-> $$ = nterm declarator (7.2: ) Entering state 13 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.4: ) +Shifting token '=' (7.4: ) Entering state 22 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.6: ) +Shifting token ID (7.6: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (7.6: ) +-> $$ = nterm expr (7.6: ) Entering state 29 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (7.7: ) +Shifting token ';' (7.7: ) Entering state 30 -Reducing stack 0 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 12 (line 99): + $1 = token TYPENAME (7.0: ) + $2 = nterm declarator (7.2: ) + $3 = token '=' (7.4: ) + $4 = nterm expr (7.6: ) + $5 = token ';' (7.7: ) +-> $$ = nterm decl (7.0-7: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (7.0-7: ) +-> $$ = nterm stmt (7.0-7: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-5.3: ) + $2 = nterm stmt (7.0-7: ) +-> $$ = nterm prog (1.1-7.7: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.0: ) +Shifting token ID (9.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.0: ) +-> $$ = nterm expr (9.0: ) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.2: ) +Shifting token '=' (9.2: ) Entering state 14 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.4: ) +Shifting token ID (9.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.4: ) +-> $$ = nterm expr (9.4: ) Entering state 24 Reading a token -Next token is token ';' () -Reducing stack 0 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () +Next token is token ';' (9.5: ) +Reducing stack 0 by rule 10 (line 94): + $1 = nterm expr (9.0: ) + $2 = token '=' (9.2: ) + $3 = nterm expr (9.4: ) +-> $$ = nterm expr (9.0-4: ) Entering state 8 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (9.5: ) +Shifting token ';' (9.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt 
() +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (9.0-4: ) + $2 = token ';' (9.5: ) +-> $$ = nterm stmt (9.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-7.7: ) + $2 = nterm stmt (9.0-5: ) +-> $$ = nterm prog (1.1-9.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (11.0: ) +Shifting token TYPENAME (11.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (11.2: ) +Shifting token '(' (11.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.3: ) +Shifting token ID (11.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (11.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (11.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (11.4: ) +On stack 0, shifting token ')' (11.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (11.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 dies. Removing dead stacks. -On stack 0, shifting token '+' () +On stack 0, shifting token '+' (11.6: ) Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (11.3: ) +-> $$ = nterm expr (11.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (11.0: ) + $2 = token '(' (11.2: ) + $3 = nterm expr (11.3: ) + $4 = token ')' (11.4: ) +-> $$ = nterm expr (11.0-4: ) Returning to deterministic operation. 
Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.8: ) +Shifting token ID (11.8: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (11.8: ) +-> $$ = nterm expr (11.8: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (11.0-4: ) + $2 = token '+' (11.6: ) + $3 = nterm expr (11.8: ) +-> $$ = nterm expr (11.0-8: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (11.9: ) +Shifting token ';' (11.9: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (11.0-8: ) + $2 = token ';' (11.9: ) +-> $$ = nterm stmt (11.0-9: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-9.5: ) + $2 = nterm stmt (11.0-9: ) +-> $$ = nterm prog (1.1-11.9: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (13.0: ) +Shifting token TYPENAME (13.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.2: ) +Shifting token '(' (13.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (13.3: ) +Shifting token ID (13.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (13.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (13.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (13.4: ) +On stack 0, shifting token ')' (13.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (13.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token ';' () +Next token is token ';' (13.5: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (13.5: ) +On stack 0, shifting token ';' (13.5: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (13.5: ) Stack 1 now in state 23 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. 
+Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (15.0: ) Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (15.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (13.3: ) +-> $$ = nterm declarator (13.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (13.2: ) + $2 = nterm declarator (13.3: ) + $3 = token ')' (13.4: ) +-> $$ = nterm declarator (13.2-4: ) +Reducing stack -1 by rule 11 (line 97): + $1 = token TYPENAME (13.0: ) + $2 = nterm declarator (13.2-4: ) + $3 = token ';' (13.5: ) +-> $$ = nterm decl (13.0-5: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (13.0-5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (13.3: ) +-> $$ = nterm expr (13.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (13.0: ) + $2 = token '(' (13.2: ) + $3 = nterm expr (13.3: ) + $4 = token ')' (13.4: ) +-> $$ = nterm expr (13.0-4: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (13.0-4: ) + $2 = token ';' (13.5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-11.9: ) + $2 = nterm stmt (13.0-5: ) +-> $$ = nterm prog (1.1-13.5: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (15.2: ) +Shifting token '(' (15.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (15.3: ) +Shifting token ID (15.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (15.4: ) Splitting off stack 1 from 0. 
-Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (15.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (15.4: ) +On stack 0, shifting token ')' (15.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (15.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '=' () +Next token is token '=' (15.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '=' () -On stack 0, shifting token '=' () +Next token is token '=' (15.6: ) +On stack 0, shifting token '=' (15.6: ) Stack 0 now in state 14 -On stack 1, shifting token '=' () +On stack 1, shifting token '=' (15.6: ) Stack 1 now in state 22 Stack 0 Entering state 14 Reading a token -Next token is token ID () +Next token is token ID (15.8: ) Stack 1 Entering state 22 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.8: ) +On stack 0, shifting token ID (15.8: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.8: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token '+' () +Next token is token '+' (15.10: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token '+' () -On stack 0, shifting token '+' () +Next token is token '+' (15.10: ) +On stack 0, shifting token '+' (15.10: ) Stack 0 now in state 15 -On stack 1, shifting token '+' () +On stack 1, shifting token '+' (15.10: ) Stack 1 now in state 15 Stack 0 Entering state 15 Reading a token -Next token is token ID () +Next token is token ID (15.12: ) Stack 1 Entering state 15 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.12: ) +On stack 0, shifting token ID (15.12: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.12: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token ';' () -Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. 
+Next token is token ';' (15.13: ) +Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. Stack 0 Entering state 8 -Next token is token ';' () +Next token is token ';' (15.13: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. +Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (15.13: ) +On stack 0, shifting token ';' (15.13: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (15.13: ) Stack 1 now in state 30 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (17.0: ) Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. 
-On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (17.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (15.3: ) +-> $$ = nterm declarator (15.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (15.2: ) + $2 = nterm declarator (15.3: ) + $3 = token ')' (15.4: ) +-> $$ = nterm declarator (15.2-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 12 (line 99): + $1 = token TYPENAME (15.0: ) + $2 = nterm declarator (15.2-4: ) + $3 = token '=' (15.6: ) + $4 = nterm expr (15.8-12: ) + $5 = token ';' (15.13: ) +-> $$ = nterm decl (15.0-13: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (15.0-13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.3: ) +-> $$ = nterm expr (15.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (15.0: ) + $2 = token '(' (15.2: ) + $3 = nterm expr (15.3: ) + $4 = token ')' (15.4: ) +-> $$ = nterm expr (15.0-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 10 (line 94): + $1 = nterm expr (15.0-4: ) + $2 = token '=' (15.6: ) + $3 = nterm expr (15.8-12: ) +-> $$ = nterm expr (15.0-12: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (15.0-12: ) + $2 = token ';' (15.13: ) +-> $$ 
= nterm stmt (15.0-13: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-13.5: ) + $2 = nterm stmt (15.0-13: ) +-> $$ = nterm prog (1.1-15.13: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (17.2: ) +Shifting token '(' (17.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (17.3: ) +Shifting token ID (17.3: ) Entering state 18 Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Next token is token ID (17.5: ) +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (17.3: ) +-> $$ = nterm expr (17.3: ) Entering state 20 -Next token is token ID () -syntax error, unexpected ID, expecting '=' or '+' or ')' -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () +Next token is token ID (17.5: ) +17.5: syntax error +Error: popping nterm expr (17.3: ) +Error: popping token '(' (17.2: ) +Error: popping token TYPENAME (17.0: ) +Shifting token error (17.0-5: ) Entering state 3 -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.5: ) +Error: discarding token ID (17.5: ) Reading a token -Next token is token ')' () -Error: discarding token ')' () +Next token is token ')' (17.6: ) +Error: discarding token ')' (17.6: ) Reading a token -Next token is token '=' () -Error: discarding token '=' () +Next token is token '=' (17.8: ) +Error: discarding token '=' (17.8: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.10: ) +Error: discarding token ID (17.10: ) Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '+' (17.12: ) +Error: discarding token '+' (17.12: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.14: ) +Error: discarding token ID (17.14: ) Reading a token -Next token is token ';' () +Next token is token ';' (17.15: ) Entering state 3 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (17.15: ) +Shifting token ';' (17.15: ) Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 5 (line 86): + $1 = token error (17.0-14: ) + $2 = token ';' (17.15: ) +-> $$ = nterm stmt (17.0-15: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-15.13: ) + $2 = nterm stmt (17.0-15: ) +-> $$ = nterm prog (1.1-17.15: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.0: ) +Shifting token ID (19.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.0: ) +-> $$ = nterm expr (19.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (19.2: ) +Shifting token '+' (19.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.4: ) +Shifting token ID (19.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID 
(19.4: ) +-> $$ = nterm expr (19.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (19.0: ) + $2 = token '+' (19.2: ) + $3 = nterm expr (19.4: ) +-> $$ = nterm expr (19.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (19.5: ) +Shifting token ';' (19.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (19.0-4: ) + $2 = token ';' (19.5: ) +-> $$ = nterm stmt (19.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-17.15: ) + $2 = nterm stmt (19.0-5: ) +-> $$ = nterm prog (1.1-19.5: ) Entering state 1 Reading a token -Next token is token '@' () -Shifting token '@' () +Next token is token '@' (21.0: ) +Shifting token '@' (21.0: ) Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () -stderr: -713. cxx-type.at:455: ok -stdout: -./c++.at:1362: $PREPARSER ./input aaaas -stderr: -stdout: -stderr: -exception caught: reduction -./cxx-type.at:452: $PREPARSER ./types test-input -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -17.5: syntax error -./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ... 
-./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:452: $PREPARSER ./types -p test-input -stderr: -stdout: -./c++.at:1362: $PREPARSER ./input i +Reducing stack 0 by rule 6 (line 87): + $1 = token '@' (21.0: ) +Cleanup: popping nterm prog (1.1-19.5: ) stderr: +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -260563,19 +257494,6 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (13.0-5: ) -> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (13.3: ) --> $$ = nterm expr (13.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (13.0: ) - $2 = token '(' (13.2: ) - $3 = nterm expr (13.3: ) - $4 = token ')' (13.4: ) --> $$ = nterm expr (13.0-4: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (13.0-4: ) - $2 = token ';' (13.5: ) --> $$ = nterm stmt (13.0-5: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-11.9: ) $2 = nterm stmt (13.0-5: ) @@ -260717,35 +257635,6 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (15.0-13: ) -> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.3: ) --> $$ = nterm expr (15.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (15.0: ) - $2 = token '(' (15.2: ) - $3 = nterm expr (15.3: ) - $4 = token ')' (15.4: ) --> $$ = nterm expr (15.0-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 10 (line 94): - $1 = nterm expr (15.0-4: ) - $2 = token '=' (15.6: ) - $3 = nterm expr (15.8-12: ) --> $$ = nterm expr (15.0-12: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (15.0-12: ) - $2 = token ';' (15.13: ) --> $$ = nterm stmt (15.0-13: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-13.5: ) $2 = nterm stmt (15.0-13: ) @@ -260853,14 +257742,20 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) +./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./glr-regression.at:205: $PREPARSER ./glr-regr1 BPBPB +714. glr-regression.at:205: ok +./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +723. glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ... +./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y +stderr: +724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ... 
+./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +stdout: +stdout: +./c++.at:849: $PREPARSER ./input stderr: -exception caught: initial-action -./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -261179,19 +258074,6 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (13.0-5: ) -> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (13.3: ) --> $$ = nterm expr (13.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (13.0: ) - $2 = token '(' (13.2: ) - $3 = nterm expr (13.3: ) - $4 = token ')' (13.4: ) --> $$ = nterm expr (13.0-4: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (13.0-4: ) - $2 = token ';' (13.5: ) --> $$ = nterm stmt (13.0-5: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-11.9: ) $2 = nterm stmt (13.0-5: ) @@ -261333,35 +258215,6 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (15.0-13: ) -> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.3: ) --> $$ = nterm expr (15.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (15.0: ) - $2 = token '(' (15.2: ) - $3 = nterm expr (15.3: ) - $4 = token ')' (15.4: ) --> $$ = nterm expr (15.0-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 10 (line 94): - $1 = nterm expr (15.0-4: ) - $2 = token '=' (15.6: ) - $3 = nterm expr (15.8-12: ) --> $$ = nterm expr (15.0-12: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (15.0-12: ) - $2 = token ';' (15.13: ) --> $$ = nterm stmt (15.0-13: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-13.5: ) $2 = nterm stmt (15.0-13: ) @@ -261469,380 +258322,6 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) -712. cxx-type.at:449: ok -./c++.at:1362: $PREPARSER ./input aaaap -714. glr-regression.at:205: ok -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap -725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ... 
- -./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y - -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffd51903c2f->Object::Object { } -0x7ffd51903cf0->Object::Object { 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'a' (0x7ffd51903cf0 'a') -0x7ffd51903c60->Object::Object { 0x7ffd51903cf0 } -0x7ffd51903c17->Object::Object { 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::~Object { 0x7ffd51903c17, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x7ffd51903c60, 0x7ffd51903cf0 } -Shifting token 'a' (0x7ffd51903c60 'a') -0x560293a652e0->Object::Object { 0x7ffd51903c60 } -0x7ffd51903bef->Object::Object { 0x560293a652e0, 0x7ffd51903c60 } -0x7ffd51903bef->Object::~Object { 0x560293a652e0, 0x7ffd51903bef, 0x7ffd51903c60 } -0x7ffd51903c60->Object::~Object { 0x560293a652e0, 0x7ffd51903c60 } -Entering state 2 -Stack now 0 2 -0x7ffd51903d10->Object::Object { 0x560293a652e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560293a652e0 'a') --> $$ = nterm item (0x7ffd51903d10 'a') -0x560293a652e0->Object::~Object { 0x560293a652e0, 0x7ffd51903d10 } -0x560293a652e0->Object::Object { 0x7ffd51903d10 } -0x7ffd51903cc8->Object::Object { 0x560293a652e0, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::~Object { 0x560293a652e0, 0x7ffd51903cc8, 0x7ffd51903d10 } -0x7ffd51903d10->Object::~Object { 0x560293a652e0, 0x7ffd51903d10 } -Entering state 11 -Stack now 0 11 -Reading a token -0x7ffd51903c2f->Object::Object { 0x560293a652e0 } -0x7ffd51903cf0->Object::Object { 0x560293a652e0, 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x560293a652e0, 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'a' (0x7ffd51903cf0 'a') -0x7ffd51903c60->Object::Object { 0x560293a652e0, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::Object { 0x560293a652e0, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::~Object { 0x560293a652e0, 0x7ffd51903c17, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x560293a652e0, 0x7ffd51903c60, 0x7ffd51903cf0 } -Shifting token 'a' (0x7ffd51903c60 'a') -0x560293a65300->Object::Object { 0x560293a652e0, 0x7ffd51903c60 } -0x7ffd51903bef->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60 } -0x7ffd51903bef->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903bef, 0x7ffd51903c60 } -0x7ffd51903c60->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60 } -Entering state 2 -Stack now 0 11 2 -0x7ffd51903d10->Object::Object { 0x560293a652e0, 0x560293a65300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560293a65300 'a') --> $$ = nterm item (0x7ffd51903d10 'a') -0x560293a65300->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903d10 } -0x560293a65300->Object::Object { 0x560293a652e0, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903cc8, 0x7ffd51903d10 } -0x7ffd51903d10->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903d10 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0x7ffd51903c2f->Object::Object { 0x560293a652e0, 0x560293a65300 } -0x7ffd51903cf0->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'a' 
(0x7ffd51903cf0 'a') -0x7ffd51903c60->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c17, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60, 0x7ffd51903cf0 } -Shifting token 'a' (0x7ffd51903c60 'a') -0x560293a65320->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60 } -0x7ffd51903bef->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60 } -0x7ffd51903bef->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903bef, 0x7ffd51903c60 } -0x7ffd51903c60->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60 } -Entering state 2 -Stack now 0 11 11 2 -0x7ffd51903d10->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560293a65320 'a') --> $$ = nterm item (0x7ffd51903d10 'a') -0x560293a65320->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903d10 } -0x560293a65320->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903cc8, 0x7ffd51903d10 } -0x7ffd51903d10->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903d10 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x7ffd51903c2f->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320 } -0x7ffd51903cf0->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'a' (0x7ffd51903cf0 'a') -0x7ffd51903c60->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c17, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60, 0x7ffd51903cf0 } -Shifting token 'a' (0x7ffd51903c60 'a') -0x560293a65340->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60 } -0x7ffd51903bef->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903c60 } -0x7ffd51903bef->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903bef, 0x7ffd51903c60 } -0x7ffd51903c60->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903c60 } -Entering state 2 -Stack now 0 11 11 11 2 -0x7ffd51903d10->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560293a65340 'a') --> $$ = nterm item (0x7ffd51903d10 'a') -0x560293a65340->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903d10 } -0x560293a65340->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903d10 } 
-0x7ffd51903cc8->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903cc8, 0x7ffd51903d10 } -0x7ffd51903d10->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903d10 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x7ffd51903c2f->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340 } -0x7ffd51903cf0->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'p' (0x7ffd51903cf0 'p'Exception caught: cleaning lookahead and stack -0x560293a65340->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903cf0 } -0x560293a65320->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903cf0 } -0x560293a65300->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903cf0 } -0x560293a652e0->Object::~Object { 0x560293a652e0, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x7ffd51903cf0 } -exception caught: printer -end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffd51903c2f->Object::Object { } -0x7ffd51903cf0->Object::Object { 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'a' (0x7ffd51903cf0 'a') -0x7ffd51903c60->Object::Object { 0x7ffd51903cf0 } -0x7ffd51903c17->Object::Object { 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::~Object { 0x7ffd51903c17, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x7ffd51903c60, 0x7ffd51903cf0 } -Shifting token 'a' (0x7ffd51903c60 'a') -0x560293a652e0->Object::Object { 0x7ffd51903c60 } -0x7ffd51903bef->Object::Object { 0x560293a652e0, 0x7ffd51903c60 } -0x7ffd51903bef->Object::~Object { 0x560293a652e0, 0x7ffd51903bef, 0x7ffd51903c60 } -0x7ffd51903c60->Object::~Object { 0x560293a652e0, 0x7ffd51903c60 } -Entering state 2 -Stack now 0 2 -0x7ffd51903d10->Object::Object { 0x560293a652e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560293a652e0 'a') --> $$ = nterm item (0x7ffd51903d10 'a') -0x560293a652e0->Object::~Object { 0x560293a652e0, 0x7ffd51903d10 } -0x560293a652e0->Object::Object { 0x7ffd51903d10 } -0x7ffd51903cc8->Object::Object { 0x560293a652e0, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::~Object { 0x560293a652e0, 0x7ffd51903cc8, 0x7ffd51903d10 } -0x7ffd51903d10->Object::~Object { 0x560293a652e0, 0x7ffd51903d10 } -Entering state 11 -Stack now 0 11 -Reading a token -0x7ffd51903c2f->Object::Object { 0x560293a652e0 } -0x7ffd51903cf0->Object::Object { 0x560293a652e0, 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x560293a652e0, 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'a' (0x7ffd51903cf0 'a') -0x7ffd51903c60->Object::Object { 0x560293a652e0, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::Object { 0x560293a652e0, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::~Object { 0x560293a652e0, 0x7ffd51903c17, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x560293a652e0, 0x7ffd51903c60, 0x7ffd51903cf0 } -Shifting token 'a' (0x7ffd51903c60 'a') -0x560293a65300->Object::Object { 0x560293a652e0, 0x7ffd51903c60 } -0x7ffd51903bef->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60 } -0x7ffd51903bef->Object::~Object { 0x560293a652e0, 
0x560293a65300, 0x7ffd51903bef, 0x7ffd51903c60 } -0x7ffd51903c60->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60 } -Entering state 2 -Stack now 0 11 2 -0x7ffd51903d10->Object::Object { 0x560293a652e0, 0x560293a65300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560293a65300 'a') --> $$ = nterm item (0x7ffd51903d10 'a') -0x560293a65300->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903d10 } -0x560293a65300->Object::Object { 0x560293a652e0, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903cc8, 0x7ffd51903d10 } -0x7ffd51903d10->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903d10 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0x7ffd51903c2f->Object::Object { 0x560293a652e0, 0x560293a65300 } -0x7ffd51903cf0->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'a' (0x7ffd51903cf0 'a') -0x7ffd51903c60->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c17, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60, 0x7ffd51903cf0 } -Shifting token 'a' (0x7ffd51903c60 'a') -0x560293a65320->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903c60 } -0x7ffd51903bef->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60 } -0x7ffd51903bef->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903bef, 0x7ffd51903c60 } -0x7ffd51903c60->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60 } -Entering state 2 -Stack now 0 11 11 2 -0x7ffd51903d10->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560293a65320 'a') --> $$ = nterm item (0x7ffd51903d10 'a') -0x560293a65320->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903d10 } -0x560293a65320->Object::Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903cc8, 0x7ffd51903d10 } -0x7ffd51903d10->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903d10 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x7ffd51903c2f->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320 } -0x7ffd51903cf0->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'a' (0x7ffd51903cf0 'a') -0x7ffd51903c60->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903c17->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c17, 0x7ffd51903c60, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60, 
0x7ffd51903cf0 } -Shifting token 'a' (0x7ffd51903c60 'a') -0x560293a65340->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903c60 } -0x7ffd51903bef->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903c60 } -0x7ffd51903bef->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903bef, 0x7ffd51903c60 } -0x7ffd51903c60->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903c60 } -Entering state 2 -Stack now 0 11 11 11 2 -0x7ffd51903d10->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560293a65340 'a') --> $$ = nterm item (0x7ffd51903d10 'a') -0x560293a65340->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903d10 } -0x560293a65340->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903d10 } -0x7ffd51903cc8->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903cc8, 0x7ffd51903d10 } -0x7ffd51903d10->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903d10 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x7ffd51903c2f->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340 } -0x7ffd51903cf0->Object::Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903c2f } -0x7ffd51903c2f->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903c2f, 0x7ffd51903cf0 } -Next token is token 'p' (0x7ffd51903cf0 'p'Exception caught: cleaning lookahead and stack -0x560293a65340->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x560293a65340, 0x7ffd51903cf0 } -0x560293a65320->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x560293a65320, 0x7ffd51903cf0 } -0x560293a65300->Object::~Object { 0x560293a652e0, 0x560293a65300, 0x7ffd51903cf0 } -0x560293a652e0->Object::~Object { 0x560293a652e0, 0x7ffd51903cf0 } -0x7ffd51903cf0->Object::~Object { 0x7ffd51903cf0 } -exception caught: printer -end { } -./c++.at:1362: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae -./glr-regression.at:593: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE -726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ... -stderr: -./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ... 
-./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -./glr-regression.at:354: $PREPARSER ./glr-regr2a input1.txt -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -./glr-regression.at:354: $PREPARSER ./glr-regr2a input2.txt -stderr: -./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS -./glr-regression.at:354: $PREPARSER ./glr-regr2a input3.txt -stderr: -./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -717. glr-regression.at:354: ok - -./glr-regression.at:597: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -728. glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ... -./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -stderr: -stdout: -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stderr: -stdout: -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:567: $here/modern -stdout: -Legac++ -./c++.at:567: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./cxx-type.at:441: $PREPARSER ./types test-input -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list 
list.cc $LIBS -17.5: syntax error -./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:441: $PREPARSER ./types -p test-input -stderr: Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -262451,639 +258930,1388 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) -./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +712. cxx-type.at:449: ok +708. cxx-type.at:426: ./glr-regression.at:354: $PREPARSER ./glr-regr2a input1.txt + ok +stderr: +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:1555: $PREPARSER ./test +./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS + +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +./glr-regression.at:354: $PREPARSER ./glr-regr2a input2.txt +stderr: +stderr: +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./glr-regression.at:489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +======== Testing with C++ standard flags: '' +./c++.at:855: $PREPARSER ./input +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS +./c++.at:1064: $PREPARSER ./input < in +stderr: +./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stdout: +stderr: +stdout: +./c++.at:567: $here/modern +./cxx-type.at:458: $PREPARSER ./types test-input +stderr: +./glr-regression.at:354: $PREPARSER ./glr-regr2a input3.txt +======== Testing with C++ standard flags: '' +./glr-regression.at:593: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +syntax error, unexpected ID, expecting '=' or '+' or ')' +stderr: +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stdout: +./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Modern C++: 201703 +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ... +726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ... 
+./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y +./c++.at:567: $PREPARSER ./list +./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1064: $PREPARSER ./input < in +./cxx-type.at:458: $PREPARSER ./types -p test-input +stderr: +717. glr-regression.at:354: ok +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +stdout: +stdout: +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ... Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 71): --> $$ = nterm prog (1.1: ) +Reducing stack 0 by rule 1 (line 64): +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (3.0: ) -Shifting token ID (3.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.0: ) --> $$ = nterm expr (3.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '+' (3.2: ) -Shifting token '+' (3.2: ) +Next token is token '+' () +Shifting token '+' () Entering state 15 Reading a token -Next token is token ID (3.4: ) -Shifting token ID (3.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.4: ) --> $$ = nterm expr (3.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (3.0: ) - $2 = token '+' (3.2: ) - $3 = nterm expr (3.4: ) --> $$ = nterm expr (3.0-4: ) +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (3.5: ) -Shifting token ';' (3.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (3.0-4: ) - $2 = token ';' (3.5: ) --> $$ = nterm stmt (3.0-5: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1: ) - $2 = nterm stmt (3.0-5: ) --> $$ = nterm prog (1.1-3.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (5.0: ) -Shifting token TYPENAME (5.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token ID (5.2: ) -Shifting token ID (5.2: ) +Next token is token ID () +Shifting token ID () Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (5.2: ) --> $$ = nterm declarator (5.2: ) +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm 
declarator () Entering state 13 Reading a token -Next token is token ';' (5.3: ) -Shifting token ';' (5.3: ) +Next token is token ';' () +Shifting token ';' () Entering state 23 -Reducing stack 0 by rule 11 (line 97): - $1 = token TYPENAME (5.0: ) - $2 = nterm declarator (5.2: ) - $3 = token ';' (5.3: ) --> $$ = nterm decl (5.0-3: ) +Reducing stack 0 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (5.0-3: ) --> $$ = nterm stmt (5.0-3: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-3.5: ) - $2 = nterm stmt (5.0-3: ) --> $$ = nterm prog (1.1-5.3: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (7.0: ) -Shifting token TYPENAME (7.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token ID (7.2: ) -Shifting token ID (7.2: ) +Next token is token ID () +Shifting token ID () Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (7.2: ) --> $$ = nterm declarator (7.2: ) +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () Entering state 13 Reading a token -Next token is token '=' (7.4: ) -Shifting token '=' (7.4: ) +Next token is token '=' () +Shifting token '=' () Entering state 22 Reading a token -Next token is token ID (7.6: ) -Shifting token ID (7.6: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (7.6: ) --> $$ = nterm expr (7.6: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 29 Reading a token -Next token is token ';' (7.7: ) -Shifting token ';' (7.7: ) +Next token is token ';' () +Shifting token ';' () Entering state 30 -Reducing stack 0 by rule 12 (line 99): - $1 = token TYPENAME (7.0: ) - $2 = nterm declarator (7.2: ) - $3 = token '=' (7.4: ) - $4 = nterm expr (7.6: ) - $5 = token ';' (7.7: ) --> $$ = nterm decl (7.0-7: ) +Reducing stack 0 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (7.0-7: ) --> $$ = nterm stmt (7.0-7: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-5.3: ) - $2 = nterm stmt (7.0-7: ) --> $$ = nterm prog (1.1-7.7: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (9.0: ) -Shifting token ID (9.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.0: ) --> $$ = nterm expr (9.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '=' (9.2: ) -Shifting token '=' (9.2: ) +Next token is token '=' () +Shifting token '=' () Entering state 14 Reading a token -Next token is token ID (9.4: ) -Shifting token ID (9.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by 
rule 7 (line 90): - $1 = token ID (9.4: ) --> $$ = nterm expr (9.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 24 Reading a token -Next token is token ';' (9.5: ) -Reducing stack 0 by rule 10 (line 94): - $1 = nterm expr (9.0: ) - $2 = token '=' (9.2: ) - $3 = nterm expr (9.4: ) --> $$ = nterm expr (9.0-4: ) +Next token is token ';' () +Reducing stack 0 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 -Next token is token ';' (9.5: ) -Shifting token ';' (9.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (9.0-4: ) - $2 = token ';' (9.5: ) --> $$ = nterm stmt (9.0-5: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-7.7: ) - $2 = nterm stmt (9.0-5: ) --> $$ = nterm prog (1.1-9.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (11.0: ) -Shifting token TYPENAME (11.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token '(' (11.2: ) -Shifting token '(' (11.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (11.3: ) -Shifting token ID (11.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ')' (11.4: ) +Next token is token ')' () Stack 0 Entering state 18 -Next token is token ')' (11.4: ) +Next token is token ')' () Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' (11.4: ) +Next token is token ')' () Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -On stack 0, shifting token ')' (11.4: ) +Next token is token ')' () +On stack 0, shifting token ')' () Stack 0 now in state 27 -On stack 1, shifting token ')' (11.4: ) +On stack 1, shifting token ')' () Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '+' (11.6: ) +Next token is token '+' () Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '+' (11.6: ) +Next token is token '+' () Stack 1 dies. Removing dead stacks. 
-On stack 0, shifting token '+' (11.6: ) +On stack 0, shifting token '+' () Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (11.3: ) --> $$ = nterm expr (11.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (11.0: ) - $2 = token '(' (11.2: ) - $3 = nterm expr (11.3: ) - $4 = token ')' (11.4: ) --> $$ = nterm expr (11.0-4: ) +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () Returning to deterministic operation. Entering state 15 Reading a token -Next token is token ID (11.8: ) -Shifting token ID (11.8: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (11.8: ) --> $$ = nterm expr (11.8: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (11.0-4: ) - $2 = token '+' (11.6: ) - $3 = nterm expr (11.8: ) --> $$ = nterm expr (11.0-8: ) +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (11.9: ) -Shifting token ';' (11.9: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (11.0-8: ) - $2 = token ';' (11.9: ) --> $$ = nterm stmt (11.0-9: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-9.5: ) - $2 = nterm stmt (11.0-9: ) --> $$ = nterm prog (1.1-11.9: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (13.0: ) -Shifting token TYPENAME (13.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token '(' (13.2: ) -Shifting token '(' (13.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (13.3: ) -Shifting token ID (13.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ')' (13.4: ) +Next token is token ')' () Stack 0 Entering state 18 -Next token is token ')' (13.4: ) +Next token is token ')' () Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' (13.4: ) +Next token is token ')' () Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -On stack 0, shifting token ')' (13.4: ) +Next token is token ')' () +On stack 0, shifting token ')' () Stack 0 now in state 27 -On stack 1, shifting token ')' (13.4: ) +On stack 1, shifting token ')' () Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. 
Stack 0 Entering state 8 Reading a token -Next token is token ';' (13.5: ) +Next token is token ';' () Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token ';' (13.5: ) -On stack 0, shifting token ';' (13.5: ) +Next token is token ';' () +On stack 0, shifting token ';' () Stack 0 now in state 16 -On stack 1, shifting token ';' (13.5: ) +On stack 1, shifting token ';' () Stack 1 now in state 23 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 23 +Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. +On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '=' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. 
+Stack 1 Entering state 13 +Next token is token '=' () +On stack 0, shifting token '=' () +Stack 0 now in state 14 +On stack 1, shifting token '=' () +Stack 1 now in state 22 +Stack 0 Entering state 14 +Reading a token +Next token is token ID () +Stack 1 Entering state 22 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token '+' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token '+' () +On stack 0, shifting token '+' () +Stack 0 now in state 15 +On stack 1, shifting token '+' () +Stack 1 now in state 15 +Stack 0 Entering state 15 +Reading a token +Next token is token ID () +Stack 1 Entering state 15 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Stack 0 Entering state 25 +Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token ';' () +Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Stack 0 Entering state 8 +Next token is token ';' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Stack 1 Entering state 25 +Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 30 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 30 +Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. 
+On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 20 +Next token is token ID () +syntax error, unexpected ID, expecting '=' or '+' or ')' +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () +Entering state 3 +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ')' () +Error: discarding token ')' () +Reading a token +Next token is token '=' () +Error: discarding token '=' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ';' () +Entering state 3 +Next token is token ';' () +Shifting token ';' () +Entering state 10 +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering 
state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token '@' () +Shifting token '@' () +Entering state 6 +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () +./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +stderr: +error: invalid character +./c++.at:1501: $CXX $CPPFLAGS $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +error: invalid expression +caught error +error: invalid character +caught error + +./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +stderr: +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +Starting parse +Entering state 0 +Reducing stack 0 by rule 1 (line 64): +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 23 +Reducing stack 0 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a 
token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 22 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 29 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 30 +Reducing stack 0 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 14 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 24 +Reading a token +Next token is token ';' () +Reducing stack 0 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '+' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '+' () +Stack 1 dies. +Removing dead stacks. +On stack 0, shifting token '+' () +Stack 0 now in state 15 +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Returning to deterministic operation. 
+Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token ';' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 23 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME (15.0: ) +Next token is token TYPENAME () Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. +Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. 
-On stack 0, shifting token TYPENAME (15.0: ) +On stack 0, shifting token TYPENAME () Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (13.3: ) --> $$ = nterm declarator (13.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (13.2: ) - $2 = nterm declarator (13.3: ) - $3 = token ')' (13.4: ) --> $$ = nterm declarator (13.2-4: ) -Reducing stack -1 by rule 11 (line 97): - $1 = token TYPENAME (13.0: ) - $2 = nterm declarator (13.2-4: ) - $3 = token ';' (13.5: ) --> $$ = nterm decl (13.0-5: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (13.0-5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (13.3: ) --> $$ = nterm expr (13.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (13.0: ) - $2 = token '(' (13.2: ) - $3 = nterm expr (13.3: ) - $4 = token ')' (13.4: ) --> $$ = nterm expr (13.0-4: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (13.0-4: ) - $2 = token ';' (13.5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-11.9: ) - $2 = nterm stmt (13.0-5: ) --> $$ = nterm prog (1.1-13.5: ) +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' (15.2: ) -Shifting token '(' (15.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (15.3: ) -Shifting token ID (15.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ')' (15.4: ) +Next token is token ')' () Stack 0 Entering state 18 -Next token is token ')' (15.4: ) +Next token is token ')' () Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' (15.4: ) +Next token is token ')' () Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -On stack 0, shifting token ')' (15.4: ) +Next token is token ')' () +On stack 0, shifting token ')' () Stack 0 now in state 27 -On stack 1, shifting token ')' (15.4: ) +On stack 1, shifting token ')' () Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. 
Stack 0 Entering state 8 Reading a token -Next token is token '=' (15.6: ) +Next token is token '=' () Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '=' (15.6: ) -On stack 0, shifting token '=' (15.6: ) +Next token is token '=' () +On stack 0, shifting token '=' () Stack 0 now in state 14 -On stack 1, shifting token '=' (15.6: ) +On stack 1, shifting token '=' () Stack 1 now in state 22 Stack 0 Entering state 14 Reading a token -Next token is token ID (15.8: ) +Next token is token ID () Stack 1 Entering state 22 -Next token is token ID (15.8: ) -On stack 0, shifting token ID (15.8: ) +Next token is token ID () +On stack 0, shifting token ID () Stack 0 now in state 5 -On stack 1, shifting token ID (15.8: ) +On stack 1, shifting token ID () Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token '+' (15.10: ) +Next token is token '+' () Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token '+' (15.10: ) -On stack 0, shifting token '+' (15.10: ) +Next token is token '+' () +On stack 0, shifting token '+' () Stack 0 now in state 15 -On stack 1, shifting token '+' (15.10: ) +On stack 1, shifting token '+' () Stack 1 now in state 15 Stack 0 Entering state 15 Reading a token -Next token is token ID (15.12: ) +Next token is token ID () Stack 1 Entering state 15 -Next token is token ID (15.12: ) -On stack 0, shifting token ID (15.12: ) +Next token is token ID () +On stack 0, shifting token ID () Stack 0 now in state 5 -On stack 1, shifting token ID (15.12: ) +On stack 1, shifting token ID () Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. +Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token ';' (15.13: ) -Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. +Next token is token ';' () +Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. Stack 0 Entering state 8 -Next token is token ';' (15.13: ) +Next token is token ';' () Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. +Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token ';' (15.13: ) -On stack 0, shifting token ';' (15.13: ) +Next token is token ';' () +On stack 0, shifting token ';' () Stack 0 now in state 16 -On stack 1, shifting token ';' (15.13: ) +On stack 1, shifting token ';' () Stack 1 now in state 30 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. 
Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME (17.0: ) +Next token is token TYPENAME () Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. +Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME (17.0: ) +On stack 0, shifting token TYPENAME () Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (15.3: ) --> $$ = nterm declarator (15.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (15.2: ) - $2 = nterm declarator (15.3: ) - $3 = token ')' (15.4: ) --> $$ = nterm declarator (15.2-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 12 (line 99): - $1 = token TYPENAME (15.0: ) - $2 = nterm declarator (15.2-4: ) - $3 = token '=' (15.6: ) - $4 = nterm expr (15.8-12: ) - $5 = token ';' (15.13: ) --> $$ = nterm decl (15.0-13: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (15.0-13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.3: ) --> $$ = nterm expr (15.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (15.0: ) - $2 = token '(' (15.2: ) - $3 = nterm expr (15.3: ) - $4 = token ')' (15.4: ) --> $$ = nterm expr (15.0-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 10 (line 94): - $1 = nterm expr (15.0-4: ) - $2 = token '=' (15.6: ) - $3 = nterm expr (15.8-12: ) --> $$ = nterm expr (15.0-12: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (15.0-12: ) - $2 = token ';' (15.13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-13.5: ) - $2 = nterm stmt (15.0-13: ) --> $$ = nterm prog (1.1-15.13: ) +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = 
nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' (17.2: ) -Shifting token '(' (17.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (17.3: ) -Shifting token ID (17.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ID (17.5: ) -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (17.3: ) --> $$ = nterm expr (17.3: ) +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 20 -Next token is token ID (17.5: ) -17.5: syntax error -Error: popping nterm expr (17.3: ) -Error: popping token '(' (17.2: ) -Error: popping token TYPENAME (17.0: ) -Shifting token error (17.0-5: ) +Next token is token ID () +syntax error, unexpected ID, expecting '=' or '+' or ')' +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () Entering state 3 -Next token is token ID (17.5: ) -Error: discarding token ID (17.5: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token ')' (17.6: ) -Error: discarding token ')' (17.6: ) +Next token is token ')' () +Error: discarding token ')' () Reading a token -Next token is token '=' (17.8: ) -Error: discarding token '=' (17.8: ) +Next token is token '=' () +Error: discarding token '=' () Reading a token -Next token is token ID (17.10: ) -Error: discarding token ID (17.10: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token '+' (17.12: ) -Error: discarding token '+' (17.12: ) +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token ID (17.14: ) -Error: discarding token ID (17.14: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token ';' (17.15: ) +Next token is token ';' () Entering state 3 -Next token is token ';' (17.15: ) -Shifting token ';' (17.15: ) +Next token is token ';' () +Shifting token ';' () Entering state 10 -Reducing stack 0 by rule 5 (line 86): - $1 = token error (17.0-14: ) - $2 = token ';' (17.15: ) --> $$ = nterm stmt (17.0-15: ) +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-15.13: ) - $2 = nterm stmt (17.0-15: ) --> $$ = nterm prog (1.1-17.15: ) +Reducing stack 0 by rule 2 (line 65): + $1 = 
nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (19.0: ) -Shifting token ID (19.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.0: ) --> $$ = nterm expr (19.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '+' (19.2: ) -Shifting token '+' (19.2: ) +Next token is token '+' () +Shifting token '+' () Entering state 15 Reading a token -Next token is token ID (19.4: ) -Shifting token ID (19.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.4: ) --> $$ = nterm expr (19.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (19.0: ) - $2 = token '+' (19.2: ) - $3 = nterm expr (19.4: ) --> $$ = nterm expr (19.0-4: ) +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (19.5: ) -Shifting token ';' (19.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (19.0-4: ) - $2 = token ';' (19.5: ) --> $$ = nterm stmt (19.0-5: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-17.15: ) - $2 = nterm stmt (19.0-5: ) --> $$ = nterm prog (1.1-19.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token '@' (21.0: ) -Shifting token '@' (21.0: ) +Next token is token '@' () +Shifting token '@' () Entering state 6 -Reducing stack 0 by rule 6 (line 87): - $1 = token '@' (21.0: ) -Cleanup: popping nterm prog (1.1-19.5: ) -710. cxx-type.at:438: ok +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () +./c++.at:1065: $PREPARSER ./input < in +713. cxx-type.at:455: ok +./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS +stderr: stderr: stdout: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./glr-regression.at:488: $PREPARSER ./glr-regr3 input.txt +./c++.at:1065: $PREPARSER ./input < in stderr: -./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -720. glr-regression.at:488: ok - +./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./glr-regression.at:597: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +./c++.at:1555: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +728. glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ... 
+./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y 729. glr-regression.at:670: testing User destructor for unresolved GLR semantic value: glr.c ... ./glr-regression.at:670: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.c glr-regr5.y -730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ... -./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./glr-regression.at:670: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr5 glr-regr5.c $LIBS +./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS stderr: +stdout: +./c++.at:1555: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: -./c++.at:573: $here/modern -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:571: $here/modern stdout: -Legac++ -./c++.at:573: $PREPARSER ./list +Modern C++: 201703 +./c++.at:571: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -263106,23 +260334,68 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:671: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:1555: ./check +-std=c++11 not supported +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:941: $PREPARSER ./input stderr: +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./glr-regression.at:355: $PREPARSER ./glr-regr2a input1.txt +./c++.at:573: $here/modern +stdout: +Modern C++: 201703 +./c++.at:573: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: ./c++.at:572: $here/modern +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Legac++ +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +Modern C++: 201703 ./c++.at:572: $PREPARSER ./list -./glr-regression.at:355: $PREPARSER ./glr-regr2a input2.txt -stderr: stderr: Destroy: "0" Destroy: "0" @@ -263145,47 +260418,501 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:355: $PREPARSER ./glr-regr2a input3.txt -stderr: -stderr: ======== Testing with C++ standard flags: '' ./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:574: $here/modern +stdout: +Modern C++: 201703 +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./c++.at:574: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./glr-regression.at:206: $PREPARSER ./glr-regr1 BPBPB +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stderr: +stdout: +./c++.at:1502: $PREPARSER ./parser +stderr: +stderr: +stdout: +./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:206: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:488: $PREPARSER ./glr-regr3 input.txt +stderr: +715. glr-regression.at:206: 690. 
c++.at:1422: ok + ok +./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +720. glr-regression.at:488: ok + + + +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ... +./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y stdout: -./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:592: $PREPARSER ./glr-regr4 stderr: -718. glr-regression.at:355: ok +731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ... +./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y ./glr-regression.at:592: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +723. glr-regression.at:592: ok +732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ... +./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y stderr: stdout: -./existing.at:1460: $PREPARSER ./input -723. glr-regression.at:592: - ok + +./c++.at:92: $PREPARSER ./input stderr: -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:671: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS +./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS +stderr: +stdout: +stderr: +./glr-regression.at:355: $PREPARSER ./glr-regr2a input1.txt +stdout: +stderr: +./glr-regression.at:596: $PREPARSER ./glr-regr4 +./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./glr-regression.at:355: $PREPARSER ./glr-regr2a input2.txt +./glr-regression.at:596: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: +./c++.at:1361: $PREPARSER ./input aaaas stdout: +stderr: ./c++.at:1360: $PREPARSER ./input aaaas +726. glr-regression.at:596: stderr: +exception caught: reduction + ok +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./glr-regression.at:355: $PREPARSER ./glr-regr2a input3.txt +./c++.at:849: $PREPARSER ./input +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ... 
+./c++.at:1361: $PREPARSER ./input aaaal +./c++.at:1360: $PREPARSER ./input aaaal +./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y +stderr: +stderr: +stderr: +./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: yylex +======== Testing with C++ standard flags: '' + +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +718. glr-regression.at:355: ok +stderr: +./c++.at:1360: $PREPARSER ./input i +exception caught: yylex +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +./c++.at:1361: $PREPARSER ./input i +stdout: + +./glr-regression.at:670: $PREPARSER ./glr-regr5 +stderr: +stderr: +exception caught: initial-action +stderr: +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./c++.at:1360: $PREPARSER ./input aaaap +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +======== Testing with C++ standard flags: '' +./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input --debug aaaap +./c++.at:1361: $PREPARSER ./input aaaap +734. glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ... +729. glr-regression.at:670: ok +./glr-regression.at:739: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS +./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ... 
+Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56523278cf40->Object::Object { } +Next token is token 'a' (0x56523278cf40 'a') +Shifting token 'a' (0x56523278cf40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56523278cf40 'a') +-> $$ = nterm item (0x56523278cf40 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x56523278cf90->Object::Object { 0x56523278cf40 } +Next token is token 'a' (0x56523278cf90 'a') +Shifting token 'a' (0x56523278cf90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56523278cf90 'a') +-> $$ = nterm item (0x56523278cf90 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x56523278cfe0->Object::Object { 0x56523278cf40, 0x56523278cf90 } +Next token is token 'a' (0x56523278cfe0 'a') +Shifting token 'a' (0x56523278cfe0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56523278cfe0 'a') +-> $$ = nterm item (0x56523278cfe0 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x56523278d030->Object::Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0 } +Next token is token 'a' (0x56523278d030 'a') +Shifting token 'a' (0x56523278d030 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56523278d030 'a') +-> $$ = nterm item (0x56523278d030 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x56523278d080->Object::Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0, 0x56523278d030 } +Next token is token 'p' (0x56523278d080 'p'Exception caught: cleaning lookahead and stack +0x56523278d080->Object::~Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0, 0x56523278d030, 0x56523278d080 } +0x56523278d030->Object::~Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0, 0x56523278d030 } +0x56523278cfe0->Object::~Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0 } +0x56523278cf90->Object::~Object { 0x56523278cf40, 0x56523278cf90 } +0x56523278cf40->Object::~Object { 0x56523278cf40 } +exception caught: printer +end { } +./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./c++.at:1360: $PREPARSER ./input aaaal +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x558f8497ff40->Object::Object { } +Next token is token 'a' (0x558f8497ff40 'a') +Shifting token 'a' (0x558f8497ff40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558f8497ff40 'a') +-> $$ = nterm item (0x558f8497ff40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x558f8497ff90->Object::Object { 0x558f8497ff40 } +Next token is token 'a' (0x558f8497ff90 'a') +Shifting token 'a' (0x558f8497ff90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558f8497ff90 'a') +-> $$ = nterm item (0x558f8497ff90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x558f8497ffe0->Object::Object { 0x558f8497ff40, 0x558f8497ff90 } +Next token is token 'a' (0x558f8497ffe0 'a') +Shifting token 'a' (0x558f8497ffe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558f8497ffe0 'a') +-> $$ = nterm item (0x558f8497ffe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token 
+0x558f84980030->Object::Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0 } +Next token is token 'a' (0x558f84980030 'a') +Shifting token 'a' (0x558f84980030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558f84980030 'a') +-> $$ = nterm item (0x558f84980030 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x558f84980080->Object::Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0, 0x558f84980030 } +Next token is token 'p' (0x558f84980080 'p'Exception caught: cleaning lookahead and stack +0x558f84980080->Object::~Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0, 0x558f84980030, 0x558f84980080 } +0x558f84980030->Object::~Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0, 0x558f84980030 } +0x558f8497ffe0->Object::~Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0 } +0x558f8497ff90->Object::~Object { 0x558f8497ff40, 0x558f8497ff90 } +0x558f8497ff40->Object::~Object { 0x558f8497ff40 } +exception caught: printer +end { } stdout: +./c++.at:1363: $PREPARSER ./input aaaal +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:235: $PREPARSER ./list +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56523278cf40->Object::Object { } +Next token is token 'a' (0x56523278cf40 'a') +Shifting token 'a' (0x56523278cf40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56523278cf40 'a') +-> $$ = nterm item (0x56523278cf40 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x56523278cf90->Object::Object { 0x56523278cf40 } +Next token is token 'a' (0x56523278cf90 'a') +Shifting token 'a' (0x56523278cf90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56523278cf90 'a') +-> $$ = nterm item (0x56523278cf90 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x56523278cfe0->Object::Object { 0x56523278cf40, 0x56523278cf90 } +Next token is token 'a' (0x56523278cfe0 'a') +Shifting token 'a' (0x56523278cfe0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56523278cfe0 'a') +-> $$ = nterm item (0x56523278cfe0 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x56523278d030->Object::Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0 } +Next token is token 'a' (0x56523278d030 'a') +Shifting token 'a' (0x56523278d030 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56523278d030 'a') +-> $$ = nterm item (0x56523278d030 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x56523278d080->Object::Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0, 0x56523278d030 } +Next token is token 'p' (0x56523278d080 'p'Exception caught: cleaning lookahead and stack +0x56523278d080->Object::~Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0, 0x56523278d030, 0x56523278d080 } +0x56523278d030->Object::~Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0, 0x56523278d030 } +0x56523278cfe0->Object::~Object { 0x56523278cf40, 0x56523278cf90, 0x56523278cfe0 } +0x56523278cf90->Object::~Object { 0x56523278cf40, 0x56523278cf90 } +0x56523278cf40->Object::~Object { 0x56523278cf40 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:740: $CXX $CPPFLAGS 
$CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS stdout: +======== Testing with C++ standard flags: '' +stderr: +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x558f8497ff40->Object::Object { } +Next token is token 'a' (0x558f8497ff40 'a') +Shifting token 'a' (0x558f8497ff40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558f8497ff40 'a') +-> $$ = nterm item (0x558f8497ff40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x558f8497ff90->Object::Object { 0x558f8497ff40 } +Next token is token 'a' (0x558f8497ff90 'a') +Shifting token 'a' (0x558f8497ff90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558f8497ff90 'a') +-> $$ = nterm item (0x558f8497ff90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x558f8497ffe0->Object::Object { 0x558f8497ff40, 0x558f8497ff90 } +Next token is token 'a' (0x558f8497ffe0 'a') +Shifting token 'a' (0x558f8497ffe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558f8497ffe0 'a') +-> $$ = nterm item (0x558f8497ffe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x558f84980030->Object::Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0 } +Next token is token 'a' (0x558f84980030 'a') +Shifting token 'a' (0x558f84980030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558f84980030 'a') +-> $$ = nterm item (0x558f84980030 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x558f84980080->Object::Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0, 0x558f84980030 } +Next token is token 'p' (0x558f84980080 'p'Exception caught: cleaning lookahead and stack +0x558f84980080->Object::~Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0, 0x558f84980030, 0x558f84980080 } +0x558f84980030->Object::~Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0, 0x558f84980030 } +0x558f8497ffe0->Object::~Object { 0x558f8497ff40, 0x558f8497ff90, 0x558f8497ffe0 } +0x558f8497ff90->Object::~Object { 0x558f8497ff40, 0x558f8497ff90 } +0x558f8497ff40->Object::~Object { 0x558f8497ff40 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1363: $PREPARSER ./input i +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae +736. glr-regression.at:844: testing Duplicated user destructor for lookahead: glr.cc ... 
+stderr: +stderr: +exception caught: syntax error +./glr-regression.at:844: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in +stderr: +stderr: +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +exception caught: initial-action +./c++.at:1360: $PREPARSER ./input aaaaE +./c++.at:1362: $PREPARSER ./input aaaas +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE +stderr: +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS +stderr: +./c++.at:1363: $PREPARSER ./input aaaap +./c++.at:1362: $PREPARSER ./input aaaal +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: $PREPARSER ./input aaaaT +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: yylex +stderr: +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./regression.at:1719: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +stderr: +./c++.at:1361: $PREPARSER ./input aaaaT +./c++.at:1360: $PREPARSER ./input aaaaR +stdout: +stderr: +./c++.at:1362: $PREPARSER ./input i +./c++.at:1363: $PREPARSER ./input --debug aaaap ./c++.at:659: $PREPARSER ./input +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./glr-regression.at:844: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -263230,199 +260957,662 @@ Cleanup: popping token EOI () Cleanup: popping nterm expr (40) destroy: 40 -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1360: $PREPARSER ./input i -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: +./c++.at:1361: $PREPARSER ./input aaaaR stderr: -exception caught: initial-action -stdout: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffff48971af->Object::Object { } +0x7ffff4897290->Object::Object { 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'a' (0x7ffff4897290 'a') +0x7ffff48971d0->Object::Object { 0x7ffff4897290 } 
+0x7ffff4897290->Object::~Object { 0x7ffff48971d0, 0x7ffff4897290 } +Shifting token 'a' (0x7ffff48971d0 'a') +0x55fde3e392e0->Object::Object { 0x7ffff48971d0 } +0x7ffff48971d0->Object::~Object { 0x55fde3e392e0, 0x7ffff48971d0 } +Entering state 1 +Stack now 0 1 +0x7ffff48972b0->Object::Object { 0x55fde3e392e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55fde3e392e0 'a') +-> $$ = nterm item (0x7ffff48972b0 'a') +0x55fde3e392e0->Object::~Object { 0x55fde3e392e0, 0x7ffff48972b0 } +0x55fde3e392e0->Object::Object { 0x7ffff48972b0 } +0x7ffff48972b0->Object::~Object { 0x55fde3e392e0, 0x7ffff48972b0 } +Entering state 10 +Stack now 0 10 +Reading a token +0x7ffff48971af->Object::Object { 0x55fde3e392e0 } +0x7ffff4897290->Object::Object { 0x55fde3e392e0, 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x55fde3e392e0, 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'a' (0x7ffff4897290 'a') +0x7ffff48971d0->Object::Object { 0x55fde3e392e0, 0x7ffff4897290 } +0x7ffff4897290->Object::~Object { 0x55fde3e392e0, 0x7ffff48971d0, 0x7ffff4897290 } +Shifting token 'a' (0x7ffff48971d0 'a') +0x55fde3e39300->Object::Object { 0x55fde3e392e0, 0x7ffff48971d0 } +0x7ffff48971d0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971d0 } +Entering state 1 +Stack now 0 10 1 +0x7ffff48972b0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55fde3e39300 'a') +-> $$ = nterm item (0x7ffff48972b0 'a') +0x55fde3e39300->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48972b0 } +0x55fde3e39300->Object::Object { 0x55fde3e392e0, 0x7ffff48972b0 } +0x7ffff48972b0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48972b0 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7ffff48971af->Object::Object { 0x55fde3e392e0, 0x55fde3e39300 } +0x7ffff4897290->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'a' (0x7ffff4897290 'a') +0x7ffff48971d0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff4897290 } +0x7ffff4897290->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971d0, 0x7ffff4897290 } +Shifting token 'a' (0x7ffff48971d0 'a') +0x55fde3e39320->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971d0 } +0x7ffff48971d0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971d0 } +Entering state 1 +Stack now 0 10 10 1 +0x7ffff48972b0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55fde3e39320 'a') +-> $$ = nterm item (0x7ffff48972b0 'a') +0x55fde3e39320->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48972b0 } +0x55fde3e39320->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48972b0 } +0x7ffff48972b0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48972b0 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x7ffff48971af->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320 } +0x7ffff4897290->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'a' (0x7ffff4897290 'a') +0x7ffff48971d0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff4897290 } +0x7ffff4897290->Object::~Object { 0x55fde3e392e0, 
0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971d0, 0x7ffff4897290 } +Shifting token 'a' (0x7ffff48971d0 'a') +0x55fde3e39340->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971d0 } +0x7ffff48971d0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48971d0 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7ffff48972b0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55fde3e39340 'a') +-> $$ = nterm item (0x7ffff48972b0 'a') +0x55fde3e39340->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48972b0 } +0x55fde3e39340->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48972b0 } +0x7ffff48972b0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48972b0 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7ffff48971af->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340 } +0x7ffff4897290->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'p' (0x7ffff4897290 'p'Exception caught: cleaning lookahead and stack +0x55fde3e39340->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff4897290 } +0x55fde3e39320->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff4897290 } +0x55fde3e39300->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff4897290 } +0x55fde3e392e0->Object::~Object { 0x55fde3e392e0, 0x7ffff4897290 } +0x7ffff4897290->Object::~Object { 0x7ffff4897290 } +exception caught: printer +end { } +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:92: $PREPARSER ./input -./c++.at:1064: $PREPARSER ./input < in +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1719: grep 'syntax error,' stderr.txt +./c++.at:1362: $PREPARSER ./input aaaap stderr: -./c++.at:1360: $PREPARSER ./input aaaap +./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: invalid expression stderr: -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffff48971af->Object::Object { } +0x7ffff4897290->Object::Object { 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'a' (0x7ffff4897290 'a') +0x7ffff48971d0->Object::Object { 0x7ffff4897290 } +0x7ffff4897290->Object::~Object { 0x7ffff48971d0, 0x7ffff4897290 } +Shifting token 'a' (0x7ffff48971d0 'a') +0x55fde3e392e0->Object::Object { 0x7ffff48971d0 } +0x7ffff48971d0->Object::~Object { 0x55fde3e392e0, 0x7ffff48971d0 } +Entering state 1 +Stack now 0 1 +0x7ffff48972b0->Object::Object { 0x55fde3e392e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55fde3e392e0 'a') +-> $$ = nterm item (0x7ffff48972b0 'a') 
+0x55fde3e392e0->Object::~Object { 0x55fde3e392e0, 0x7ffff48972b0 } +0x55fde3e392e0->Object::Object { 0x7ffff48972b0 } +0x7ffff48972b0->Object::~Object { 0x55fde3e392e0, 0x7ffff48972b0 } +Entering state 10 +Stack now 0 10 +Reading a token +0x7ffff48971af->Object::Object { 0x55fde3e392e0 } +0x7ffff4897290->Object::Object { 0x55fde3e392e0, 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x55fde3e392e0, 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'a' (0x7ffff4897290 'a') +0x7ffff48971d0->Object::Object { 0x55fde3e392e0, 0x7ffff4897290 } +0x7ffff4897290->Object::~Object { 0x55fde3e392e0, 0x7ffff48971d0, 0x7ffff4897290 } +Shifting token 'a' (0x7ffff48971d0 'a') +0x55fde3e39300->Object::Object { 0x55fde3e392e0, 0x7ffff48971d0 } +0x7ffff48971d0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971d0 } +Entering state 1 +Stack now 0 10 1 +0x7ffff48972b0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55fde3e39300 'a') +-> $$ = nterm item (0x7ffff48972b0 'a') +0x55fde3e39300->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48972b0 } +0x55fde3e39300->Object::Object { 0x55fde3e392e0, 0x7ffff48972b0 } +0x7ffff48972b0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48972b0 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7ffff48971af->Object::Object { 0x55fde3e392e0, 0x55fde3e39300 } +0x7ffff4897290->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'a' (0x7ffff4897290 'a') +0x7ffff48971d0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff4897290 } +0x7ffff4897290->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971d0, 0x7ffff4897290 } +Shifting token 'a' (0x7ffff48971d0 'a') +0x55fde3e39320->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48971d0 } +0x7ffff48971d0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971d0 } +Entering state 1 +Stack now 0 10 10 1 +0x7ffff48972b0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55fde3e39320 'a') +-> $$ = nterm item (0x7ffff48972b0 'a') +0x55fde3e39320->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48972b0 } +0x55fde3e39320->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff48972b0 } +0x7ffff48972b0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48972b0 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x7ffff48971af->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320 } +0x7ffff4897290->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'a' (0x7ffff4897290 'a') +0x7ffff48971d0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff4897290 } +0x7ffff4897290->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971d0, 0x7ffff4897290 } +Shifting token 'a' (0x7ffff48971d0 'a') +0x55fde3e39340->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48971d0 } +0x7ffff48971d0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48971d0 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7ffff48972b0->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 
0x55fde3e39320, 0x55fde3e39340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55fde3e39340 'a') +-> $$ = nterm item (0x7ffff48972b0 'a') +0x55fde3e39340->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48972b0 } +0x55fde3e39340->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff48972b0 } +0x7ffff48972b0->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48972b0 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7ffff48971af->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340 } +0x7ffff4897290->Object::Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48971af } +0x7ffff48971af->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff48971af, 0x7ffff4897290 } +Next token is token 'p' (0x7ffff4897290 'p'Exception caught: cleaning lookahead and stack +0x55fde3e39340->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x55fde3e39340, 0x7ffff4897290 } +0x55fde3e39320->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x55fde3e39320, 0x7ffff4897290 } +0x55fde3e39300->Object::~Object { 0x55fde3e392e0, 0x55fde3e39300, 0x7ffff4897290 } +0x55fde3e392e0->Object::~Object { 0x55fde3e392e0, 0x7ffff4897290 } +0x7ffff4897290->Object::~Object { 0x7ffff4897290 } +exception caught: printer +end { } +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: grep '^exception caught: printer$' stderr +stdout: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +exception caught: printer +./c++.at:1362: $PREPARSER ./input --debug aaaap +./c++.at:1363: $PREPARSER ./input aaaae ======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:1360: $PREPARSER ./input --debug aaaap -732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ... -731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ... 
-./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y -./c++.at:1064: $PREPARSER ./input < in -./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y stderr: +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -0x563f019baf40->Object::Object { } -Next token is token 'a' (0x563f019baf40 'a') -Shifting token 'a' (0x563f019baf40 'a') +0x7ffd87e4fe9f->Object::Object { } +0x7ffd87e4ff80->Object::Object { 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'a' (0x7ffd87e4ff80 'a') +0x7ffd87e4fec0->Object::Object { 0x7ffd87e4ff80 } +0x7ffd87e4ff80->Object::~Object { 0x7ffd87e4fec0, 0x7ffd87e4ff80 } +Shifting token 'a' (0x7ffd87e4fec0 'a') +0x562f949b22e0->Object::Object { 0x7ffd87e4fec0 } +0x7ffd87e4fec0->Object::~Object { 0x562f949b22e0, 0x7ffd87e4fec0 } Entering state 2 Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x563f019baf40 'a') --> $$ = nterm item (0x563f019baf40 'a') +0x7ffd87e4ffa0->Object::Object { 0x562f949b22e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x562f949b22e0 'a') +-> $$ = nterm item (0x7ffd87e4ffa0 'a') +0x562f949b22e0->Object::~Object { 0x562f949b22e0, 0x7ffd87e4ffa0 } +0x562f949b22e0->Object::Object { 0x7ffd87e4ffa0 } +0x7ffd87e4ffa0->Object::~Object { 0x562f949b22e0, 0x7ffd87e4ffa0 } Entering state 11 Stack now 0 11 Reading a token -0x563f019baf90->Object::Object { 0x563f019baf40 } -Next token is token 'a' (0x563f019baf90 'a') -Shifting token 'a' (0x563f019baf90 'a') +0x7ffd87e4fe9f->Object::Object { 0x562f949b22e0 } +0x7ffd87e4ff80->Object::Object { 0x562f949b22e0, 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x562f949b22e0, 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'a' (0x7ffd87e4ff80 'a') +0x7ffd87e4fec0->Object::Object { 0x562f949b22e0, 0x7ffd87e4ff80 } +0x7ffd87e4ff80->Object::~Object { 0x562f949b22e0, 0x7ffd87e4fec0, 0x7ffd87e4ff80 } +Shifting token 'a' (0x7ffd87e4fec0 'a') +0x562f949b2300->Object::Object { 0x562f949b22e0, 0x7ffd87e4fec0 } +0x7ffd87e4fec0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fec0 } Entering state 2 Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x563f019baf90 'a') --> $$ = nterm item (0x563f019baf90 'a') +0x7ffd87e4ffa0->Object::Object { 0x562f949b22e0, 0x562f949b2300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x562f949b2300 'a') +-> $$ = nterm item (0x7ffd87e4ffa0 'a') +0x562f949b2300->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ffa0 } +0x562f949b2300->Object::Object { 0x562f949b22e0, 0x7ffd87e4ffa0 } +0x7ffd87e4ffa0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ffa0 } Entering state 11 Stack now 0 11 11 Reading a token -0x563f019bafe0->Object::Object { 0x563f019baf40, 0x563f019baf90 } -Next token is token 'a' (0x563f019bafe0 'a') -Shifting token 'a' (0x563f019bafe0 'a') +0x7ffd87e4fe9f->Object::Object { 0x562f949b22e0, 0x562f949b2300 } +0x7ffd87e4ff80->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'a' (0x7ffd87e4ff80 'a') +0x7ffd87e4fec0->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ff80 } 
+0x7ffd87e4ff80->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fec0, 0x7ffd87e4ff80 } +Shifting token 'a' (0x7ffd87e4fec0 'a') +0x562f949b2320->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fec0 } +0x7ffd87e4fec0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fec0 } Entering state 2 Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x563f019bafe0 'a') --> $$ = nterm item (0x563f019bafe0 'a') +0x7ffd87e4ffa0->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x562f949b2320 'a') +-> $$ = nterm item (0x7ffd87e4ffa0 'a') +0x562f949b2320->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ffa0 } +0x562f949b2320->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ffa0 } +0x7ffd87e4ffa0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ffa0 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x563f019bb030->Object::Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0 } -Next token is token 'a' (0x563f019bb030 'a') -Shifting token 'a' (0x563f019bb030 'a') +0x7ffd87e4fe9f->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320 } +0x7ffd87e4ff80->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'a' (0x7ffd87e4ff80 'a') +0x7ffd87e4fec0->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ff80 } +0x7ffd87e4ff80->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fec0, 0x7ffd87e4ff80 } +Shifting token 'a' (0x7ffd87e4fec0 'a') +0x562f949b2340->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fec0 } +0x7ffd87e4fec0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4fec0 } Entering state 2 Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x563f019bb030 'a') --> $$ = nterm item (0x563f019bb030 'a') +0x7ffd87e4ffa0->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x562f949b2340 'a') +-> $$ = nterm item (0x7ffd87e4ffa0 'a') +0x562f949b2340->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4ffa0 } +0x562f949b2340->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ffa0 } +0x7ffd87e4ffa0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4ffa0 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x563f019bb080->Object::Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0, 0x563f019bb030 } -Next token is token 'p' (0x563f019bb080 'p'Exception caught: cleaning lookahead and stack -0x563f019bb080->Object::~Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0, 0x563f019bb030, 0x563f019bb080 } -0x563f019bb030->Object::~Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0, 0x563f019bb030 } -0x563f019bafe0->Object::~Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0 } -0x563f019baf90->Object::~Object { 0x563f019baf40, 0x563f019baf90 } -0x563f019baf40->Object::~Object { 0x563f019baf40 } +0x7ffd87e4fe9f->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340 } +0x7ffd87e4ff80->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 
0x562f949b2340, 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'p' (0x7ffd87e4ff80 'p'Exception caught: cleaning lookahead and stack +0x562f949b2340->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4ff80 } +0x562f949b2320->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ff80 } +0x562f949b2300->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ff80 } +0x562f949b22e0->Object::~Object { 0x562f949b22e0, 0x7ffd87e4ff80 } +0x7ffd87e4ff80->Object::~Object { 0x7ffd87e4ff80 } exception caught: printer end { } stderr: -error: invalid character -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1363: $PREPARSER ./input aaaaE Starting parse Entering state 0 Stack now 0 Reading a token -0x563f019baf40->Object::Object { } -Next token is token 'a' (0x563f019baf40 'a') -Shifting token 'a' (0x563f019baf40 'a') +0x7ffd87e4fe9f->Object::Object { } +0x7ffd87e4ff80->Object::Object { 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'a' (0x7ffd87e4ff80 'a') +0x7ffd87e4fec0->Object::Object { 0x7ffd87e4ff80 } +0x7ffd87e4ff80->Object::~Object { 0x7ffd87e4fec0, 0x7ffd87e4ff80 } +Shifting token 'a' (0x7ffd87e4fec0 'a') +0x562f949b22e0->Object::Object { 0x7ffd87e4fec0 } +0x7ffd87e4fec0->Object::~Object { 0x562f949b22e0, 0x7ffd87e4fec0 } Entering state 2 Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x563f019baf40 'a') --> $$ = nterm item (0x563f019baf40 'a') +0x7ffd87e4ffa0->Object::Object { 0x562f949b22e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x562f949b22e0 'a') +-> $$ = nterm item (0x7ffd87e4ffa0 'a') +0x562f949b22e0->Object::~Object { 0x562f949b22e0, 0x7ffd87e4ffa0 } +0x562f949b22e0->Object::Object { 0x7ffd87e4ffa0 } +0x7ffd87e4ffa0->Object::~Object { 0x562f949b22e0, 0x7ffd87e4ffa0 } Entering state 11 Stack now 0 11 Reading a token -0x563f019baf90->Object::Object { 0x563f019baf40 } -Next token is token 'a' (0x563f019baf90 'a') -Shifting token 'a' (0x563f019baf90 'a') +0x7ffd87e4fe9f->Object::Object { 0x562f949b22e0 } +0x7ffd87e4ff80->Object::Object { 0x562f949b22e0, 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x562f949b22e0, 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'a' (0x7ffd87e4ff80 'a') +0x7ffd87e4fec0->Object::Object { 0x562f949b22e0, 0x7ffd87e4ff80 } +0x7ffd87e4ff80->Object::~Object { 0x562f949b22e0, 0x7ffd87e4fec0, 0x7ffd87e4ff80 } +Shifting token 'a' (0x7ffd87e4fec0 'a') +0x562f949b2300->Object::Object { 0x562f949b22e0, 0x7ffd87e4fec0 } +0x7ffd87e4fec0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fec0 } Entering state 2 Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x563f019baf90 'a') --> $$ = nterm item (0x563f019baf90 'a') +0x7ffd87e4ffa0->Object::Object { 0x562f949b22e0, 0x562f949b2300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x562f949b2300 
'a') +-> $$ = nterm item (0x7ffd87e4ffa0 'a') +0x562f949b2300->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ffa0 } +0x562f949b2300->Object::Object { 0x562f949b22e0, 0x7ffd87e4ffa0 } +0x7ffd87e4ffa0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ffa0 } Entering state 11 Stack now 0 11 11 Reading a token -0x563f019bafe0->Object::Object { 0x563f019baf40, 0x563f019baf90 } -Next token is token 'a' (0x563f019bafe0 'a') -Shifting token 'a' (0x563f019bafe0 'a') +0x7ffd87e4fe9f->Object::Object { 0x562f949b22e0, 0x562f949b2300 } +0x7ffd87e4ff80->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'a' (0x7ffd87e4ff80 'a') +0x7ffd87e4fec0->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ff80 } +0x7ffd87e4ff80->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fec0, 0x7ffd87e4ff80 } +Shifting token 'a' (0x7ffd87e4fec0 'a') +0x562f949b2320->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4fec0 } +0x7ffd87e4fec0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fec0 } Entering state 2 Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x563f019bafe0 'a') --> $$ = nterm item (0x563f019bafe0 'a') +0x7ffd87e4ffa0->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x562f949b2320 'a') +-> $$ = nterm item (0x7ffd87e4ffa0 'a') +0x562f949b2320->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ffa0 } +0x562f949b2320->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ffa0 } +0x7ffd87e4ffa0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ffa0 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x563f019bb030->Object::Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0 } -Next token is token 'a' (0x563f019bb030 'a') -Shifting token 'a' (0x563f019bb030 'a') +0x7ffd87e4fe9f->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320 } +0x7ffd87e4ff80->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'a' (0x7ffd87e4ff80 'a') +0x7ffd87e4fec0->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ff80 } +0x7ffd87e4ff80->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fec0, 0x7ffd87e4ff80 } +Shifting token 'a' (0x7ffd87e4fec0 'a') +0x562f949b2340->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4fec0 } +0x7ffd87e4fec0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4fec0 } Entering state 2 Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x563f019bb030 'a') --> $$ = nterm item (0x563f019bb030 'a') +0x7ffd87e4ffa0->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x562f949b2340 'a') +-> $$ = nterm item (0x7ffd87e4ffa0 'a') +0x562f949b2340->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4ffa0 } +0x562f949b2340->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ffa0 } +0x7ffd87e4ffa0->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 
0x7ffd87e4ffa0 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x563f019bb080->Object::Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0, 0x563f019bb030 } -Next token is token 'p' (0x563f019bb080 'p'Exception caught: cleaning lookahead and stack -0x563f019bb080->Object::~Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0, 0x563f019bb030, 0x563f019bb080 } -0x563f019bb030->Object::~Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0, 0x563f019bb030 } -0x563f019bafe0->Object::~Object { 0x563f019baf40, 0x563f019baf90, 0x563f019bafe0 } -0x563f019baf90->Object::~Object { 0x563f019baf40, 0x563f019baf90 } -0x563f019baf40->Object::~Object { 0x563f019baf40 } +0x7ffd87e4fe9f->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340 } +0x7ffd87e4ff80->Object::Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4fe9f } +0x7ffd87e4fe9f->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4fe9f, 0x7ffd87e4ff80 } +Next token is token 'p' (0x7ffd87e4ff80 'p'Exception caught: cleaning lookahead and stack +0x562f949b2340->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x562f949b2340, 0x7ffd87e4ff80 } +0x562f949b2320->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x562f949b2320, 0x7ffd87e4ff80 } +0x562f949b2300->Object::~Object { 0x562f949b22e0, 0x562f949b2300, 0x7ffd87e4ff80 } +0x562f949b22e0->Object::~Object { 0x562f949b22e0, 0x7ffd87e4ff80 } +0x7ffd87e4ff80->Object::~Object { 0x7ffd87e4ff80 } exception caught: printer end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -======== Testing with C++ standard flags: '' +./c++.at:1362: grep '^exception caught: printer$' stderr stdout: exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaae stderr: +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -624. existing.at:1460: ok -./c++.at:1360: $PREPARSER ./input aaaaE +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +651. 
regression.at:1628: skipped (regression.at:1727) +./c++.at:1363: $PREPARSER ./input aaaaT +./c++.at:1362: $PREPARSER ./input aaaaT stderr: -./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./c++.at:1363: $PREPARSER ./input aaaaR +./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS stdout: -./c++.at:1555: $PREPARSER ./test -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT +737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ... +./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' stderr: +stdout: +./glr-regression.at:593: $PREPARSER ./glr-regr4 stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./glr-regression.at:489: $PREPARSER ./glr-regr3 input.txt +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:845: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS +stderr: +stderr: +./glr-regression.at:489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:593: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +721. glr-regression.at:489: stdout: + ok +724. glr-regression.at:593: ok +./glr-regression.at:597: $PREPARSER ./glr-regr4 + -./c++.at:1360: $PREPARSER ./input aaaaR -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +stderr: +stdout: +error: invalid expression +caught error +error: invalid character +caught error +738. glr-regression.at:944: testing Incorrectly initialized location for empty right-hand side in GLR: glr.c ... +739. glr-regression.at:945: testing Incorrectly initialized location for empty right-hand side in GLR: glr.cc ... 
+./glr-regression.at:944: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.c glr-regr8.y +./glr-regression.at:945: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y +stderr: +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./glr-regression.at:738: $PREPARSER ./glr-regr6 +./c++.at:1065: $PREPARSER ./input < in +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:944: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr8 glr-regr8.c $LIBS +./glr-regression.at:945: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS +stderr: +stderr: +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +syntax error +Discarding 'a'. +Reducing 'a'. +./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +727. glr-regression.at:597: ok + +stderr: +stdout: +./c++.at:1066: $PREPARSER ./input < in +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ... +stderr: +stderr: +./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid expression +caught error +error: invalid character +caught error +732. 
glr-regression.at:738: ok + +./c++.at:1064: $PREPARSER ./input < in +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 1876 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 144 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS +./c++.at:1066: $PREPARSER ./input < in +stderr: +stderr: +error: invalid expression ======== Testing with C++ standard flags: '' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stdout: +./glr-regression.at:207: $PREPARSER ./glr-regr1 BPBPB +741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ... 
+./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y +stderr: +error: invalid expression stderr: stdout: -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: ./c++.at:566: $here/modern stdout: -Modern C++: 201103 +stderr: +stdout: +./glr-regression.at:843: $PREPARSER ./glr-regr7 +./c++.at:1065: $PREPARSER ./input < in +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 +./c++.at:1066: $PREPARSER ./input < in +stderr: +memory exhausted +stderr: +./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +error: invalid expression +stderr: +Legac++ ./c++.at:566: $PREPARSER ./list stderr: +./glr-regression.at:207: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +======== Testing with C++ standard flags: '' +stderr: +stderr: +stdout: +stdout: +stderr: +./c++.at:855: $PREPARSER ./input +./c++.at:849: $PREPARSER ./input +stdout: +./c++.at:1064: $PREPARSER ./input < in +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +735. glr-regression.at:843: ok +stderr: +stderr: +stderr: +stdout: +stderr: +stderr: +stderr: +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1555: $PREPARSER ./test +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid character Destroy: "0" Destroy: "0" Destroy: 1 @@ -263444,33 +261634,239 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +stdout: +716. glr-regression.at:207: ok + + +742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ... +./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' ./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +stderr: +stderr: +743. glr-regression.at:1038: testing No users destructors if stack 0 deleted: glr2.cc ... 
+error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1065: $PREPARSER ./input < in +./glr-regression.at:1038: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ... -./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y +stderr: +error: invalid character +======== Testing with C++ standard flags: '' +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./glr-regression.at:1037: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./glr-regression.at:1038: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS stderr: stderr: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' +stdout: +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: stdout: +./c++.at:92: $PREPARSER ./input stdout: -./glr-regression.at:739: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS -./glr-regression.at:596: $PREPARSER ./glr-regr4 +./c++.at:568: $here/modern +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Legac++ +./c++.at:568: $PREPARSER ./list +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +./glr-regression.at:671: $PREPARSER ./glr-regr5 +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +stderr: +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +730. 
glr-regression.at:671: ok +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: +stdout: +stderr: +./glr-regression.at:739: $PREPARSER ./glr-regr6 +stdout: ./c++.at:1555: ./check + +stderr: ./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:739: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +733. glr-regression.at:739: ok +stderr: stdout: +stdout: +./c++.at:1066: ./check +./glr-regression.at:844: $PREPARSER ./glr-regr7 +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:570: $here/modern -./glr-regression.at:596: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted + +./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +736. glr-regression.at:844: ./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + ok +744. glr-regression.at:1102: testing Corrupted semantic options if user action cuts parse: glr.c ... +./glr-regression.at:1102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.c glr-regr10.y + +stderr: +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:567: $here/modern +./glr-regression.at:1102: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr10 glr-regr10.c $LIBS +stdout: +stderr: +stdout: +Legac++ +./c++.at:567: $PREPARSER ./list +./c++.at:235: $PREPARSER ./list +745. glr-regression.at:1103: testing Corrupted semantic options if user action cuts parse: glr.cc ... +stderr: +./glr-regression.at:1103: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stdout: +./c++.at:569: $here/modern +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stderr: +stdout: +./c++.at:1066: ./check +stdout: stdout: Legac++ +746. glr-regression.at:1104: testing Corrupted semantic options if user action cuts parse: glr2.cc ... 
+./glr-regression.at:944: $PREPARSER ./glr-regr8 +./glr-regression.at:1104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./c++.at:569: $PREPARSER ./list stderr: -./c++.at:570: $PREPARSER ./list +stderr: +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: -726. glr-regression.at:596: ok stderr: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:570: $here/modern +./glr-regression.at:1103: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS +./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +Legac++ +./c++.at:570: $PREPARSER ./list +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +738. glr-regression.at:944: ok +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y Destroy: "0" Destroy: "0" Destroy: 1 @@ -263492,494 +261888,1279 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./glr-regression.at:1104: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS ./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -734. glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ... 
-./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./glr-regression.at:740: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:849: $PREPARSER ./input +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:855: $PREPARSER ./input +stdout: +./c++.at:573: $here/modern +./glr-regression.at:490: $PREPARSER ./glr-regr3 input.txt +stdout: stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Legac++ +./c++.at:573: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +722. glr-regression.at:490: ok +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +747. glr-regression.at:1174: testing Undesirable destructors if user action cuts parse: glr.c ... + +./glr-regression.at:1174: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.c glr-regr11.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./glr-regression.at:206: $PREPARSER ./glr-regr1 BPBPB +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: -./glr-regression.at:206: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -715. glr-regression.at:206: ok +stdout: +./glr-regression.at:1174: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr11 glr-regr11.c $LIBS +./c++.at:1360: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal +stderr: +stdout: +./glr-regression.at:1036: $PREPARSER ./glr-regr9 +stderr: +stdout: +stderr: +748. glr-regression.at:1175: testing Undesirable destructors if user action cuts parse: glr.cc ... 
+./glr-regression.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: $here/modern +stderr: +memory exhausted +./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1360: $PREPARSER ./input i +Legac++ +stderr: +exception caught: initial-action +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:571: $PREPARSER ./list +./c++.at:659: $PREPARSER ./input +741. glr-regression.at:1036: ok +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1360: $PREPARSER ./input --debug aaaap +./glr-regression.at:1175: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS stderr: stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ... 
-./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5626e7384f40->Object::Object { } +Next token is token 'a' (0x5626e7384f40 'a') +Shifting token 'a' (0x5626e7384f40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626e7384f40 'a') +-> $$ = nterm item (0x5626e7384f40 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x5626e7384f90->Object::Object { 0x5626e7384f40 } +Next token is token 'a' (0x5626e7384f90 'a') +Shifting token 'a' (0x5626e7384f90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626e7384f90 'a') +-> $$ = nterm item (0x5626e7384f90 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x5626e7384fe0->Object::Object { 0x5626e7384f40, 0x5626e7384f90 } +Next token is token 'a' (0x5626e7384fe0 'a') +Shifting token 'a' (0x5626e7384fe0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626e7384fe0 'a') +-> $$ = nterm item (0x5626e7384fe0 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x5626e7385030->Object::Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0 } +Next token is token 'a' (0x5626e7385030 'a') +Shifting token 'a' (0x5626e7385030 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626e7385030 'a') +-> $$ = nterm item (0x5626e7385030 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x5626e7385080->Object::Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0, 0x5626e7385030 } +Next token is token 'p' (0x5626e7385080 'p'Exception caught: cleaning lookahead and stack +0x5626e7385080->Object::~Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0, 0x5626e7385030, 0x5626e7385080 } +0x5626e7385030->Object::~Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0, 0x5626e7385030 } +0x5626e7384fe0->Object::~Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0 } +0x5626e7384f90->Object::~Object { 0x5626e7384f40, 0x5626e7384f90 } +0x5626e7384f40->Object::~Object { 0x5626e7384f40 } +exception caught: printer +end { } +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:356: $PREPARSER ./glr-regr2a input1.txt +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5626e7384f40->Object::Object { } +Next token is token 'a' (0x5626e7384f40 'a') +Shifting token 'a' (0x5626e7384f40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626e7384f40 'a') +-> $$ = nterm item (0x5626e7384f40 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x5626e7384f90->Object::Object { 0x5626e7384f40 } +Next token is token 'a' (0x5626e7384f90 'a') +Shifting token 'a' (0x5626e7384f90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626e7384f90 'a') +-> $$ = nterm item (0x5626e7384f90 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x5626e7384fe0->Object::Object { 0x5626e7384f40, 0x5626e7384f90 } +Next token is token 'a' (0x5626e7384fe0 'a') +Shifting token 'a' (0x5626e7384fe0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626e7384fe0 'a') +-> $$ = nterm item (0x5626e7384fe0 'a') +Entering state 11 +Stack now 0 11 11 11 
+Reading a token +0x5626e7385030->Object::Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0 } +Next token is token 'a' (0x5626e7385030 'a') +Shifting token 'a' (0x5626e7385030 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626e7385030 'a') +-> $$ = nterm item (0x5626e7385030 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x5626e7385080->Object::Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0, 0x5626e7385030 } +Next token is token 'p' (0x5626e7385080 'p'Exception caught: cleaning lookahead and stack +0x5626e7385080->Object::~Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0, 0x5626e7385030, 0x5626e7385080 } +0x5626e7385030->Object::~Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0, 0x5626e7385030 } +0x5626e7384fe0->Object::~Object { 0x5626e7384f40, 0x5626e7384f90, 0x5626e7384fe0 } +0x5626e7384f90->Object::~Object { 0x5626e7384f40, 0x5626e7384f90 } +0x5626e7384f40->Object::~Object { 0x5626e7384f40 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr +stderr: +stdout: +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: printer +stderr: +749. glr-regression.at:1176: testing Undesirable destructors if user action cuts parse: glr2.cc ... +./glr-regression.at:1176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y +./c++.at:1360: $PREPARSER ./input aaaae +./glr-regression.at:356: $PREPARSER ./glr-regr2a input2.txt stdout: +stderr: ./c++.at:1361: $PREPARSER ./input aaaas +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS exception caught: reduction -stderr: -stdout: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaas +./c++.at:1360: $PREPARSER ./input aaaaE stderr: -exception caught: reduction ./c++.at:1361: $PREPARSER ./input aaaal -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: yylex +./glr-regression.at:356: $PREPARSER ./glr-regr2a input3.txt ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1361: $PREPARSER ./input i -./c++.at:1363: $PREPARSER ./input aaaal stderr: +./glr-regression.at:1176: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS +stderr: +./c++.at:1360: $PREPARSER ./input aaaaT +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: yylex -./glr-regression.at:489: $PREPARSER ./glr-regr3 input.txt -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +719. 
glr-regression.at:356: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +stdout: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:594: $PREPARSER ./glr-regr4 +stderr: +stdout: +stderr: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1360: $PREPARSER ./input aaaaR +./glr-regression.at:594: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaap stderr: + +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i +======== Testing with C++ standard flags: '' +725. glr-regression.at:594: ok ./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: -721. glr-regression.at:489: exception caught: initial-action +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./c++.at:572: $here/modern stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x5652f8e63f40->Object::Object { } -Next token is token 'a' (0x5652f8e63f40 'a') -Shifting token 'a' (0x5652f8e63f40 'a') +0x564ac2c4bf40->Object::Object { } +Next token is token 'a' (0x564ac2c4bf40 'a') +Shifting token 'a' (0x564ac2c4bf40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5652f8e63f40 'a') --> $$ = nterm item (0x5652f8e63f40 'a') + $1 = token 'a' (0x564ac2c4bf40 'a') +-> $$ = nterm item (0x564ac2c4bf40 'a') Entering state 10 Stack now 0 10 Reading a token -0x5652f8e63f90->Object::Object { 0x5652f8e63f40 } -Next token is token 'a' (0x5652f8e63f90 'a') -Shifting token 'a' (0x5652f8e63f90 'a') +0x564ac2c4bf90->Object::Object { 0x564ac2c4bf40 } +Next token is token 'a' (0x564ac2c4bf90 'a') +Shifting token 'a' (0x564ac2c4bf90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5652f8e63f90 'a') --> $$ = nterm item (0x5652f8e63f90 'a') + $1 = token 'a' (0x564ac2c4bf90 'a') +-> $$ = nterm item (0x564ac2c4bf90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x5652f8e63fe0->Object::Object { 0x5652f8e63f40, 0x5652f8e63f90 } -Next token is token 'a' (0x5652f8e63fe0 'a') -Shifting token 'a' (0x5652f8e63fe0 'a') +0x564ac2c4bfe0->Object::Object { 0x564ac2c4bf40, 0x564ac2c4bf90 } +Next token is token 'a' (0x564ac2c4bfe0 'a') +Shifting token 'a' (0x564ac2c4bfe0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5652f8e63fe0 'a') --> $$ = nterm item (0x5652f8e63fe0 'a') + $1 = token 'a' (0x564ac2c4bfe0 'a') +-> $$ = nterm item (0x564ac2c4bfe0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x5652f8e64030->Object::Object { 0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0 } -Next token is token 'a' (0x5652f8e64030 'a') -Shifting token 'a' (0x5652f8e64030 'a') +0x564ac2c4c030->Object::Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0 } +Next token is token 'a' (0x564ac2c4c030 'a') +Shifting token 'a' (0x564ac2c4c030 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5652f8e64030 'a') --> $$ = nterm item (0x5652f8e64030 'a') + $1 = token 'a' (0x564ac2c4c030 'a') +-> $$ = nterm item (0x564ac2c4c030 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x5652f8e64080->Object::Object { 
0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0, 0x5652f8e64030 } -Next token is token 'p' (0x5652f8e64080 'p'Exception caught: cleaning lookahead and stack -0x5652f8e64080->Object::~Object { 0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0, 0x5652f8e64030, 0x5652f8e64080 } -0x5652f8e64030->Object::~Object { 0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0, 0x5652f8e64030 } -0x5652f8e63fe0->Object::~Object { 0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0 } -0x5652f8e63f90->Object::~Object { 0x5652f8e63f40, 0x5652f8e63f90 } -0x5652f8e63f40->Object::~Object { 0x5652f8e63f40 } +0x564ac2c4c080->Object::Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0, 0x564ac2c4c030 } +Next token is token 'p' (0x564ac2c4c080 'p'Exception caught: cleaning lookahead and stack +0x564ac2c4c080->Object::~Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0, 0x564ac2c4c030, 0x564ac2c4c080 } +0x564ac2c4c030->Object::~Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0, 0x564ac2c4c030 } +0x564ac2c4bfe0->Object::~Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0 } +0x564ac2c4bf90->Object::~Object { 0x564ac2c4bf40, 0x564ac2c4bf90 } +0x564ac2c4bf40->Object::~Object { 0x564ac2c4bf40 } exception caught: printer end { } - ok ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +Legac++ +./c++.at:572: $PREPARSER ./list stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x5652f8e63f40->Object::Object { } -Next token is token 'a' (0x5652f8e63f40 'a') -Shifting token 'a' (0x5652f8e63f40 'a') +0x564ac2c4bf40->Object::Object { } +Next token is token 'a' (0x564ac2c4bf40 'a') +Shifting token 'a' (0x564ac2c4bf40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5652f8e63f40 'a') --> $$ = nterm item (0x5652f8e63f40 'a') + $1 = token 'a' (0x564ac2c4bf40 'a') +-> $$ = nterm item (0x564ac2c4bf40 'a') Entering state 10 Stack now 0 10 Reading a token -0x5652f8e63f90->Object::Object { 0x5652f8e63f40 } -Next token is token 'a' (0x5652f8e63f90 'a') -Shifting token 'a' (0x5652f8e63f90 'a') +0x564ac2c4bf90->Object::Object { 0x564ac2c4bf40 } +Next token is token 'a' (0x564ac2c4bf90 'a') +Shifting token 'a' (0x564ac2c4bf90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5652f8e63f90 'a') --> $$ = nterm item (0x5652f8e63f90 'a') + $1 = token 'a' (0x564ac2c4bf90 'a') +-> $$ = nterm item (0x564ac2c4bf90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x5652f8e63fe0->Object::Object { 0x5652f8e63f40, 0x5652f8e63f90 } -Next token is token 'a' (0x5652f8e63fe0 'a') -Shifting token 'a' (0x5652f8e63fe0 'a') +0x564ac2c4bfe0->Object::Object { 0x564ac2c4bf40, 0x564ac2c4bf90 } +Next token is token 'a' (0x564ac2c4bfe0 'a') +Shifting token 'a' (0x564ac2c4bfe0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5652f8e63fe0 'a') --> $$ = nterm item (0x5652f8e63fe0 'a') + $1 = token 'a' (0x564ac2c4bfe0 'a') +-> $$ = nterm item (0x564ac2c4bfe0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x5652f8e64030->Object::Object { 0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0 } -Next token is token 'a' (0x5652f8e64030 'a') -Shifting token 'a' (0x5652f8e64030 'a') +0x564ac2c4c030->Object::Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0 } +Next token is token 'a' (0x564ac2c4c030 'a') +Shifting token 'a' (0x564ac2c4c030 'a') Entering state 1 Stack now 0 10 10 
10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5652f8e64030 'a') --> $$ = nterm item (0x5652f8e64030 'a') + $1 = token 'a' (0x564ac2c4c030 'a') +-> $$ = nterm item (0x564ac2c4c030 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x5652f8e64080->Object::Object { 0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0, 0x5652f8e64030 } -Next token is token 'p' (0x5652f8e64080 'p'Exception caught: cleaning lookahead and stack -0x5652f8e64080->Object::~Object { 0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0, 0x5652f8e64030, 0x5652f8e64080 } -0x5652f8e64030->Object::~Object { 0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0, 0x5652f8e64030 } -0x5652f8e63fe0->Object::~Object { 0x5652f8e63f40, 0x5652f8e63f90, 0x5652f8e63fe0 } -0x5652f8e63f90->Object::~Object { 0x5652f8e63f40, 0x5652f8e63f90 } -0x5652f8e63f40->Object::~Object { 0x5652f8e63f40 } +0x564ac2c4c080->Object::Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0, 0x564ac2c4c030 } +Next token is token 'p' (0x564ac2c4c080 'p'Exception caught: cleaning lookahead and stack +0x564ac2c4c080->Object::~Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0, 0x564ac2c4c030, 0x564ac2c4c080 } +0x564ac2c4c030->Object::~Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0, 0x564ac2c4c030 } +0x564ac2c4bfe0->Object::~Object { 0x564ac2c4bf40, 0x564ac2c4bf90, 0x564ac2c4bfe0 } +0x564ac2c4bf90->Object::~Object { 0x564ac2c4bf40, 0x564ac2c4bf90 } +0x564ac2c4bf40->Object::~Object { 0x564ac2c4bf40 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr -./c++.at:1363: $PREPARSER ./input aaaap -stdout: stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +stdout: exception caught: printer -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaae +750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ... +./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y stderr: -./c++.at:1363: $PREPARSER ./input --debug aaaap +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y exception caught: syntax error ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +751. glr-regression.at:1311: testing Leaked semantic values if user action cuts parse: glr.cc ... 
+./glr-regression.at:1311: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y +./c++.at:1361: $PREPARSER ./input aaaaT +./glr-regression.at:1310: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr12 glr-regr12.c $LIBS +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./glr-regression.at:598: $PREPARSER ./glr-regr4 +stderr: +stderr: +stdout: +./glr-regression.at:945: $PREPARSER ./glr-regr8 +./glr-regression.at:598: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./glr-regression.at:1311: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS +728. glr-regression.at:598: ok +./glr-regression.at:945: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +739. glr-regression.at:945: ok +stdout: +======== Testing with C++ standard flags: '' + +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + +stderr: +stdout: +./c++.at:1362: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1362: $PREPARSER ./input aaaal +stdout: +./c++.at:849: $PREPARSER ./input +stderr: +stderr: +exception caught: yylex +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +752. glr-regression.at:1312: testing Leaked semantic values if user action cuts parse: glr2.cc ... +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./glr-regression.at:1312: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y +753. glr-regression.at:1445: testing Incorrect lookahead during deterministic GLR: glr.c ... +./c++.at:1362: $PREPARSER ./input i +./c++.at:855: $PREPARSER ./input +./glr-regression.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.c glr-regr13.y +stderr: +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: initial-action +stdout: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1102: $PREPARSER ./glr-regr10 +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +stderr: +./c++.at:1362: $PREPARSER ./input aaaap +./c++.at:941: $PREPARSER ./input +./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $here/modern +744. 
glr-regression.at:1102: ./c++.at:1362: $PREPARSER ./input --debug aaaap +======== Testing with C++ standard flags: '' + ok +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stdout: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffddadaaecf->Object::Object { } +0x7ffddadaaf90->Object::Object { 0x7ffddadaaecf } +0x7ffddadaaecf->Object::~Object { 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'a' (0x7ffddadaaf90 'a') +0x7ffddadaaf00->Object::Object { 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::Object { 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::~Object { 0x7ffddadaaeb7, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x7ffddadaaf00, 0x7ffddadaaf90 } +Shifting token 'a' (0x7ffddadaaf00 'a') +0x556f028ca2e0->Object::Object { 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::Object { 0x556f028ca2e0, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::~Object { 0x556f028ca2e0, 0x7ffddadaae8f, 0x7ffddadaaf00 } +0x7ffddadaaf00->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaf00 } +Entering state 2 +Stack now 0 2 +0x7ffddadaafb0->Object::Object { 0x556f028ca2e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556f028ca2e0 'a') +-> $$ = nterm item (0x7ffddadaafb0 'a') +0x556f028ca2e0->Object::~Object { 0x556f028ca2e0, 0x7ffddadaafb0 } +0x556f028ca2e0->Object::Object { 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::Object { 0x556f028ca2e0, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaf68, 0x7ffddadaafb0 } +0x7ffddadaafb0->Object::~Object { 0x556f028ca2e0, 0x7ffddadaafb0 } +Entering state 11 +Stack now 0 11 +Reading a token +0x7ffddadaaecf->Object::Object { 0x556f028ca2e0 } +0x7ffddadaaf90->Object::Object { 0x556f028ca2e0, 0x7ffddadaaecf } +0x7ffddadaaecf->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'a' (0x7ffddadaaf90 'a') +0x7ffddadaaf00->Object::Object { 0x556f028ca2e0, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::Object { 0x556f028ca2e0, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaeb7, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaf00, 0x7ffddadaaf90 } +Shifting token 'a' (0x7ffddadaaf00 'a') +0x556f028ca300->Object::Object { 0x556f028ca2e0, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaae8f, 0x7ffddadaaf00 } +0x7ffddadaaf00->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00 } +Entering state 2 +Stack now 0 11 2 +0x7ffddadaafb0->Object::Object { 0x556f028ca2e0, 0x556f028ca300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556f028ca300 'a') +-> $$ = nterm item (0x7ffddadaafb0 'a') +0x556f028ca300->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaafb0 } +0x556f028ca300->Object::Object { 0x556f028ca2e0, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf68, 0x7ffddadaafb0 } +0x7ffddadaafb0->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaafb0 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0x7ffddadaaecf->Object::Object { 0x556f028ca2e0, 0x556f028ca300 } +0x7ffddadaaf90->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaecf } +0x7ffddadaaecf->Object::~Object { 
0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'a' (0x7ffddadaaf90 'a') +0x7ffddadaaf00->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaeb7, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00, 0x7ffddadaaf90 } +Shifting token 'a' (0x7ffddadaaf00 'a') +0x556f028ca320->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaae8f, 0x7ffddadaaf00 } +0x7ffddadaaf00->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00 } +Entering state 2 +Stack now 0 11 11 2 +0x7ffddadaafb0->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556f028ca320 'a') +-> $$ = nterm item (0x7ffddadaafb0 'a') +0x556f028ca320->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaafb0 } +0x556f028ca320->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf68, 0x7ffddadaafb0 } +0x7ffddadaafb0->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaafb0 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x7ffddadaaecf->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320 } +0x7ffddadaaf90->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaecf } +0x7ffddadaaecf->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'a' (0x7ffddadaaf90 'a') +0x7ffddadaaf00->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaeb7, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00, 0x7ffddadaaf90 } +Shifting token 'a' (0x7ffddadaaf00 'a') +0x556f028ca340->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaae8f, 0x7ffddadaaf00 } +0x7ffddadaaf00->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaf00 } +Entering state 2 +Stack now 0 11 11 11 2 +0x7ffddadaafb0->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556f028ca340 'a') +-> $$ = nterm item (0x7ffddadaafb0 'a') +0x556f028ca340->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaafb0 } +0x556f028ca340->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaafb0 } 
+0x7ffddadaaf68->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaf68, 0x7ffddadaafb0 } +0x7ffddadaafb0->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaafb0 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x7ffddadaaecf->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340 } +0x7ffddadaaf90->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaecf } +0x7ffddadaaecf->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'p' (0x7ffddadaaf90 'p'Exception caught: cleaning lookahead and stack +0x556f028ca340->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaf90 } +0x556f028ca320->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf90 } +0x556f028ca300->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf90 } +0x556f028ca2e0->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x7ffddadaaf90 } +exception caught: printer +end { } +./glr-regression.at:1445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr13 glr-regr13.c $LIBS +Legac++ +./c++.at:574: $PREPARSER ./list +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffddadaaecf->Object::Object { } +0x7ffddadaaf90->Object::Object { 0x7ffddadaaecf } +0x7ffddadaaecf->Object::~Object { 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'a' (0x7ffddadaaf90 'a') +0x7ffddadaaf00->Object::Object { 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::Object { 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::~Object { 0x7ffddadaaeb7, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x7ffddadaaf00, 0x7ffddadaaf90 } +Shifting token 'a' (0x7ffddadaaf00 'a') +0x556f028ca2e0->Object::Object { 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::Object { 0x556f028ca2e0, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::~Object { 0x556f028ca2e0, 0x7ffddadaae8f, 0x7ffddadaaf00 } +0x7ffddadaaf00->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaf00 } +Entering state 2 +Stack now 0 2 +0x7ffddadaafb0->Object::Object { 0x556f028ca2e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556f028ca2e0 'a') +-> $$ = nterm item (0x7ffddadaafb0 'a') +0x556f028ca2e0->Object::~Object { 0x556f028ca2e0, 0x7ffddadaafb0 } +0x556f028ca2e0->Object::Object { 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::Object { 0x556f028ca2e0, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaf68, 0x7ffddadaafb0 } +0x7ffddadaafb0->Object::~Object { 0x556f028ca2e0, 0x7ffddadaafb0 } +Entering state 11 +Stack now 0 11 +Reading a token +0x7ffddadaaecf->Object::Object { 0x556f028ca2e0 } +0x7ffddadaaf90->Object::Object { 0x556f028ca2e0, 0x7ffddadaaecf } +0x7ffddadaaecf->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'a' 
(0x7ffddadaaf90 'a') +0x7ffddadaaf00->Object::Object { 0x556f028ca2e0, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::Object { 0x556f028ca2e0, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaeb7, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaf00, 0x7ffddadaaf90 } +Shifting token 'a' (0x7ffddadaaf00 'a') +0x556f028ca300->Object::Object { 0x556f028ca2e0, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaae8f, 0x7ffddadaaf00 } +0x7ffddadaaf00->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00 } +Entering state 2 +Stack now 0 11 2 +0x7ffddadaafb0->Object::Object { 0x556f028ca2e0, 0x556f028ca300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556f028ca300 'a') +-> $$ = nterm item (0x7ffddadaafb0 'a') +0x556f028ca300->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaafb0 } +0x556f028ca300->Object::Object { 0x556f028ca2e0, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf68, 0x7ffddadaafb0 } +0x7ffddadaafb0->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaafb0 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0x7ffddadaaecf->Object::Object { 0x556f028ca2e0, 0x556f028ca300 } +0x7ffddadaaf90->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaecf } +0x7ffddadaaecf->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'a' (0x7ffddadaaf90 'a') +0x7ffddadaaf00->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaeb7, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00, 0x7ffddadaaf90 } +Shifting token 'a' (0x7ffddadaaf00 'a') +0x556f028ca320->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaae8f, 0x7ffddadaaf00 } +0x7ffddadaaf00->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00 } +Entering state 2 +Stack now 0 11 11 2 +0x7ffddadaafb0->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556f028ca320 'a') +-> $$ = nterm item (0x7ffddadaafb0 'a') +0x556f028ca320->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaafb0 } +0x556f028ca320->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf68, 0x7ffddadaafb0 } +0x7ffddadaafb0->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaafb0 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x7ffddadaaecf->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320 } +0x7ffddadaaf90->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaecf } 
+0x7ffddadaaecf->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'a' (0x7ffddadaaf90 'a') +0x7ffddadaaf00->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaeb7->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaeb7, 0x7ffddadaaf00, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00, 0x7ffddadaaf90 } +Shifting token 'a' (0x7ffddadaaf00 'a') +0x556f028ca340->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaf00 } +0x7ffddadaae8f->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaae8f, 0x7ffddadaaf00 } +0x7ffddadaaf00->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaf00 } +Entering state 2 +Stack now 0 11 11 11 2 +0x7ffddadaafb0->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556f028ca340 'a') +-> $$ = nterm item (0x7ffddadaafb0 'a') +0x556f028ca340->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaafb0 } +0x556f028ca340->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaafb0 } +0x7ffddadaaf68->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaf68, 0x7ffddadaafb0 } +0x7ffddadaafb0->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaafb0 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x7ffddadaaecf->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340 } +0x7ffddadaaf90->Object::Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaecf } +0x7ffddadaaecf->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaecf, 0x7ffddadaaf90 } +Next token is token 'p' (0x7ffddadaaf90 'p'Exception caught: cleaning lookahead and stack +0x556f028ca340->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x556f028ca340, 0x7ffddadaaf90 } +0x556f028ca320->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x556f028ca320, 0x7ffddadaaf90 } +0x556f028ca300->Object::~Object { 0x556f028ca2e0, 0x556f028ca300, 0x7ffddadaaf90 } +0x556f028ca2e0->Object::~Object { 0x556f028ca2e0, 0x7ffddadaaf90 } +0x7ffddadaaf90->Object::~Object { 0x7ffddadaaf90 } +exception caught: printer +end { } +./c++.at:1362: grep '^exception caught: printer$' stderr +./glr-regression.at:1312: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stdout: +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae +stderr: +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
+./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +754. glr-regression.at:1446: testing Incorrect lookahead during deterministic GLR: glr.cc ... +stderr: +./glr-regression.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./glr-regression.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS +stderr: +stderr: +stdout: +./glr-regression.at:1037: $PREPARSER ./glr-regr9 +stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +memory exhausted +./glr-regression.at:1037: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal +742. glr-regression.at:1037: ok +stderr: +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./c++.at:1363: $PREPARSER ./input i +stderr: +stderr: +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./glr-regression.at:1174: $PREPARSER ./glr-regr11 +stderr: +./glr-regression.at:1174: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +747. glr-regression.at:1174: ok +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +755. glr-regression.at:1447: testing Incorrect lookahead during deterministic GLR: glr2.cc ... 
+./c++.at:1363: $PREPARSER ./input --debug aaaap +./glr-regression.at:1447: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffd6b77b34f->Object::Object { } -0x7ffd6b77b410->Object::Object { 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'a' (0x7ffd6b77b410 'a') -0x7ffd6b77b380->Object::Object { 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::Object { 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::~Object { 0x7ffd6b77b337, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x7ffd6b77b380, 0x7ffd6b77b410 } -Shifting token 'a' (0x7ffd6b77b380 'a') -0x5642428ee2e0->Object::Object { 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b30f, 0x7ffd6b77b380 } -0x7ffd6b77b380->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b380 } +0x7ffea225cdef->Object::Object { } +0x7ffea225ceb0->Object::Object { 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'a' (0x7ffea225ceb0 'a') +0x7ffea225ce20->Object::Object { 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::Object { 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::~Object { 0x7ffea225cdd7, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x7ffea225ce20, 0x7ffea225ceb0 } +Shifting token 'a' (0x7ffea225ce20 'a') +0x5610732ff2e0->Object::Object { 0x7ffea225ce20 } +0x7ffea225cdaf->Object::Object { 0x5610732ff2e0, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::~Object { 0x5610732ff2e0, 0x7ffea225cdaf, 0x7ffea225ce20 } +0x7ffea225ce20->Object::~Object { 0x5610732ff2e0, 0x7ffea225ce20 } Entering state 1 Stack now 0 1 -0x7ffd6b77b430->Object::Object { 0x5642428ee2e0 } +0x7ffea225ced0->Object::Object { 0x5610732ff2e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5642428ee2e0 'a') --> $$ = nterm item (0x7ffd6b77b430 'a') -0x5642428ee2e0->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b430 } -0x5642428ee2e0->Object::Object { 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b3e8, 0x7ffd6b77b430 } -0x7ffd6b77b430->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b430 } + $1 = token 'a' (0x5610732ff2e0 'a') +-> $$ = nterm item (0x7ffea225ced0 'a') +0x5610732ff2e0->Object::~Object { 0x5610732ff2e0, 0x7ffea225ced0 } +0x5610732ff2e0->Object::Object { 0x7ffea225ced0 } +0x7ffea225ce88->Object::Object { 0x5610732ff2e0, 0x7ffea225ced0 } +0x7ffea225ce88->Object::~Object { 0x5610732ff2e0, 0x7ffea225ce88, 0x7ffea225ced0 } +0x7ffea225ced0->Object::~Object { 0x5610732ff2e0, 0x7ffea225ced0 } Entering state 10 Stack now 0 10 Reading a token -0x7ffd6b77b34f->Object::Object { 0x5642428ee2e0 } -0x7ffd6b77b410->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'a' (0x7ffd6b77b410 'a') -0x7ffd6b77b380->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b337, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b380, 0x7ffd6b77b410 } -Shifting token 'a' (0x7ffd6b77b380 'a') 
-0x5642428ee300->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b30f, 0x7ffd6b77b380 } -0x7ffd6b77b380->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380 } +0x7ffea225cdef->Object::Object { 0x5610732ff2e0 } +0x7ffea225ceb0->Object::Object { 0x5610732ff2e0, 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x5610732ff2e0, 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'a' (0x7ffea225ceb0 'a') +0x7ffea225ce20->Object::Object { 0x5610732ff2e0, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::Object { 0x5610732ff2e0, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::~Object { 0x5610732ff2e0, 0x7ffea225cdd7, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x5610732ff2e0, 0x7ffea225ce20, 0x7ffea225ceb0 } +Shifting token 'a' (0x7ffea225ce20 'a') +0x5610732ff300->Object::Object { 0x5610732ff2e0, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225cdaf, 0x7ffea225ce20 } +0x7ffea225ce20->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20 } Entering state 1 Stack now 0 10 1 -0x7ffd6b77b430->Object::Object { 0x5642428ee2e0, 0x5642428ee300 } +0x7ffea225ced0->Object::Object { 0x5610732ff2e0, 0x5610732ff300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5642428ee300 'a') --> $$ = nterm item (0x7ffd6b77b430 'a') -0x5642428ee300->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b430 } -0x5642428ee300->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b3e8, 0x7ffd6b77b430 } -0x7ffd6b77b430->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b430 } + $1 = token 'a' (0x5610732ff300 'a') +-> $$ = nterm item (0x7ffea225ced0 'a') +0x5610732ff300->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ced0 } +0x5610732ff300->Object::Object { 0x5610732ff2e0, 0x7ffea225ced0 } +0x7ffea225ce88->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ced0 } +0x7ffea225ce88->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce88, 0x7ffea225ced0 } +0x7ffea225ced0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ced0 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffd6b77b34f->Object::Object { 0x5642428ee2e0, 0x5642428ee300 } -0x7ffd6b77b410->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'a' (0x7ffd6b77b410 'a') -0x7ffd6b77b380->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b337, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380, 0x7ffd6b77b410 } -Shifting token 'a' (0x7ffd6b77b380 'a') -0x5642428ee320->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 
0x7ffd6b77b30f, 0x7ffd6b77b380 } -0x7ffd6b77b380->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380 } +0x7ffea225cdef->Object::Object { 0x5610732ff2e0, 0x5610732ff300 } +0x7ffea225ceb0->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'a' (0x7ffea225ceb0 'a') +0x7ffea225ce20->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225cdd7, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20, 0x7ffea225ceb0 } +Shifting token 'a' (0x7ffea225ce20 'a') +0x5610732ff320->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225cdaf, 0x7ffea225ce20 } +0x7ffea225ce20->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20 } Entering state 1 Stack now 0 10 10 1 -0x7ffd6b77b430->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320 } +0x7ffea225ced0->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5642428ee320 'a') --> $$ = nterm item (0x7ffd6b77b430 'a') -0x5642428ee320->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b430 } -0x5642428ee320->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b3e8, 0x7ffd6b77b430 } -0x7ffd6b77b430->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b430 } + $1 = token 'a' (0x5610732ff320 'a') +-> $$ = nterm item (0x7ffea225ced0 'a') +0x5610732ff320->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ced0 } +0x5610732ff320->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ced0 } +0x7ffea225ce88->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ced0 } +0x7ffea225ce88->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce88, 0x7ffea225ced0 } +0x7ffea225ced0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ced0 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffd6b77b34f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320 } -0x7ffd6b77b410->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'a' (0x7ffd6b77b410 'a') -0x7ffd6b77b380->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b337, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380, 0x7ffd6b77b410 } -Shifting token 'a' 
(0x7ffd6b77b380 'a') -0x5642428ee340->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b30f, 0x7ffd6b77b380 } -0x7ffd6b77b380->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b380 } +0x7ffea225cdef->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320 } +0x7ffea225ceb0->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'a' (0x7ffea225ceb0 'a') +0x7ffea225ce20->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225cdd7, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20, 0x7ffea225ceb0 } +Shifting token 'a' (0x7ffea225ce20 'a') +0x5610732ff340->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225cdaf, 0x7ffea225ce20 } +0x7ffea225ce20->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ce20 } Entering state 1 Stack now 0 10 10 10 1 -0x7ffd6b77b430->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340 } +0x7ffea225ced0->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5642428ee340 'a') --> $$ = nterm item (0x7ffd6b77b430 'a') -0x5642428ee340->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b430 } -0x5642428ee340->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b3e8, 0x7ffd6b77b430 } -0x7ffd6b77b430->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b430 } + $1 = token 'a' (0x5610732ff340 'a') +-> $$ = nterm item (0x7ffea225ced0 'a') +0x5610732ff340->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ced0 } +0x5610732ff340->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ced0 } +0x7ffea225ce88->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ced0 } +0x7ffea225ce88->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ce88, 0x7ffea225ced0 } +0x7ffea225ced0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ced0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffd6b77b34f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340 } 
-0x7ffd6b77b410->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'p' (0x7ffd6b77b410 'p'Exception caught: cleaning lookahead and stack -0x5642428ee340->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b410 } -0x5642428ee320->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b410 } -0x5642428ee300->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b410 } -0x5642428ee2e0->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x7ffd6b77b410 } +0x7ffea225cdef->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340 } +0x7ffea225ceb0->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'p' (0x7ffea225ceb0 'p'Exception caught: cleaning lookahead and stack +0x5610732ff340->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ceb0 } +0x5610732ff320->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ceb0 } +0x5610732ff300->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ceb0 } +0x5610732ff2e0->Object::~Object { 0x5610732ff2e0, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x7ffea225ceb0 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE stderr: -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffd6b77b34f->Object::Object { } -0x7ffd6b77b410->Object::Object { 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'a' (0x7ffd6b77b410 'a') -0x7ffd6b77b380->Object::Object { 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::Object { 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::~Object { 0x7ffd6b77b337, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x7ffd6b77b380, 0x7ffd6b77b410 } -Shifting token 'a' (0x7ffd6b77b380 'a') -0x5642428ee2e0->Object::Object { 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b30f, 0x7ffd6b77b380 } -0x7ffd6b77b380->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b380 } +0x7ffea225cdef->Object::Object { } +0x7ffea225ceb0->Object::Object { 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'a' (0x7ffea225ceb0 'a') +0x7ffea225ce20->Object::Object { 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::Object { 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::~Object { 0x7ffea225cdd7, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x7ffea225ce20, 0x7ffea225ceb0 } +Shifting token 'a' (0x7ffea225ce20 'a') +0x5610732ff2e0->Object::Object { 0x7ffea225ce20 } +0x7ffea225cdaf->Object::Object { 0x5610732ff2e0, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::~Object { 0x5610732ff2e0, 0x7ffea225cdaf, 0x7ffea225ce20 } +0x7ffea225ce20->Object::~Object { 0x5610732ff2e0, 0x7ffea225ce20 } Entering state 1 Stack now 0 1 -0x7ffd6b77b430->Object::Object { 
0x5642428ee2e0 } +0x7ffea225ced0->Object::Object { 0x5610732ff2e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5642428ee2e0 'a') --> $$ = nterm item (0x7ffd6b77b430 'a') -0x5642428ee2e0->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b430 } -0x5642428ee2e0->Object::Object { 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b3e8, 0x7ffd6b77b430 } -0x7ffd6b77b430->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b430 } + $1 = token 'a' (0x5610732ff2e0 'a') +-> $$ = nterm item (0x7ffea225ced0 'a') +0x5610732ff2e0->Object::~Object { 0x5610732ff2e0, 0x7ffea225ced0 } +0x5610732ff2e0->Object::Object { 0x7ffea225ced0 } +0x7ffea225ce88->Object::Object { 0x5610732ff2e0, 0x7ffea225ced0 } +0x7ffea225ce88->Object::~Object { 0x5610732ff2e0, 0x7ffea225ce88, 0x7ffea225ced0 } +0x7ffea225ced0->Object::~Object { 0x5610732ff2e0, 0x7ffea225ced0 } Entering state 10 Stack now 0 10 Reading a token -0x7ffd6b77b34f->Object::Object { 0x5642428ee2e0 } -0x7ffd6b77b410->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'a' (0x7ffd6b77b410 'a') -0x7ffd6b77b380->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b337, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b380, 0x7ffd6b77b410 } -Shifting token 'a' (0x7ffd6b77b380 'a') -0x5642428ee300->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b30f, 0x7ffd6b77b380 } -0x7ffd6b77b380->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380 } +0x7ffea225cdef->Object::Object { 0x5610732ff2e0 } +0x7ffea225ceb0->Object::Object { 0x5610732ff2e0, 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x5610732ff2e0, 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'a' (0x7ffea225ceb0 'a') +0x7ffea225ce20->Object::Object { 0x5610732ff2e0, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::Object { 0x5610732ff2e0, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::~Object { 0x5610732ff2e0, 0x7ffea225cdd7, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x5610732ff2e0, 0x7ffea225ce20, 0x7ffea225ceb0 } +Shifting token 'a' (0x7ffea225ce20 'a') +0x5610732ff300->Object::Object { 0x5610732ff2e0, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225cdaf, 0x7ffea225ce20 } +0x7ffea225ce20->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20 } Entering state 1 Stack now 0 10 1 -0x7ffd6b77b430->Object::Object { 0x5642428ee2e0, 0x5642428ee300 } +0x7ffea225ced0->Object::Object { 0x5610732ff2e0, 0x5610732ff300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5642428ee300 'a') --> $$ = nterm item (0x7ffd6b77b430 'a') -0x5642428ee300->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b430 } -0x5642428ee300->Object::Object { 0x5642428ee2e0, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b3e8, 0x7ffd6b77b430 } -0x7ffd6b77b430->Object::~Object { 
0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b430 } + $1 = token 'a' (0x5610732ff300 'a') +-> $$ = nterm item (0x7ffea225ced0 'a') +0x5610732ff300->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ced0 } +0x5610732ff300->Object::Object { 0x5610732ff2e0, 0x7ffea225ced0 } +0x7ffea225ce88->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ced0 } +0x7ffea225ce88->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce88, 0x7ffea225ced0 } +0x7ffea225ced0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ced0 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffd6b77b34f->Object::Object { 0x5642428ee2e0, 0x5642428ee300 } -0x7ffd6b77b410->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'a' (0x7ffd6b77b410 'a') -0x7ffd6b77b380->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b337, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380, 0x7ffd6b77b410 } -Shifting token 'a' (0x7ffd6b77b380 'a') -0x5642428ee320->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b30f, 0x7ffd6b77b380 } -0x7ffd6b77b380->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380 } +0x7ffea225cdef->Object::Object { 0x5610732ff2e0, 0x5610732ff300 } +0x7ffea225ceb0->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'a' (0x7ffea225ceb0 'a') +0x7ffea225ce20->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225cdd7, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20, 0x7ffea225ceb0 } +Shifting token 'a' (0x7ffea225ce20 'a') +0x5610732ff320->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225cdaf, 0x7ffea225ce20 } +0x7ffea225ce20->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20 } Entering state 1 Stack now 0 10 10 1 -0x7ffd6b77b430->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320 } +0x7ffea225ced0->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5642428ee320 'a') --> $$ = nterm item (0x7ffd6b77b430 'a') -0x5642428ee320->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b430 } -0x5642428ee320->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 
0x7ffd6b77b3e8, 0x7ffd6b77b430 } -0x7ffd6b77b430->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b430 } + $1 = token 'a' (0x5610732ff320 'a') +-> $$ = nterm item (0x7ffea225ced0 'a') +0x5610732ff320->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ced0 } +0x5610732ff320->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ced0 } +0x7ffea225ce88->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ced0 } +0x7ffea225ce88->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce88, 0x7ffea225ced0 } +0x7ffea225ced0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ced0 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffd6b77b34f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320 } -0x7ffd6b77b410->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'a' (0x7ffd6b77b410 'a') -0x7ffd6b77b380->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b337->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b337, 0x7ffd6b77b380, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380, 0x7ffd6b77b410 } -Shifting token 'a' (0x7ffd6b77b380 'a') -0x5642428ee340->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b380 } -0x7ffd6b77b30f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b30f, 0x7ffd6b77b380 } -0x7ffd6b77b380->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b380 } +0x7ffea225cdef->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320 } +0x7ffea225ceb0->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'a' (0x7ffea225ceb0 'a') +0x7ffea225ce20->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225cdd7->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225cdd7, 0x7ffea225ce20, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20, 0x7ffea225ceb0 } +Shifting token 'a' (0x7ffea225ce20 'a') +0x5610732ff340->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ce20 } +0x7ffea225cdaf->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225cdaf, 0x7ffea225ce20 } +0x7ffea225ce20->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ce20 } Entering state 1 Stack now 0 10 10 10 1 -0x7ffd6b77b430->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340 } +0x7ffea225ced0->Object::Object 
{ 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5642428ee340 'a') --> $$ = nterm item (0x7ffd6b77b430 'a') -0x5642428ee340->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b430 } -0x5642428ee340->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b430 } -0x7ffd6b77b3e8->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b3e8, 0x7ffd6b77b430 } -0x7ffd6b77b430->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b430 } + $1 = token 'a' (0x5610732ff340 'a') +-> $$ = nterm item (0x7ffea225ced0 'a') +0x5610732ff340->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ced0 } +0x5610732ff340->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ced0 } +0x7ffea225ce88->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ced0 } +0x7ffea225ce88->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ce88, 0x7ffea225ced0 } +0x7ffea225ced0->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ced0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffd6b77b34f->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340 } -0x7ffd6b77b410->Object::Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b34f } -0x7ffd6b77b34f->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b34f, 0x7ffd6b77b410 } -Next token is token 'p' (0x7ffd6b77b410 'p'Exception caught: cleaning lookahead and stack -0x5642428ee340->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x5642428ee340, 0x7ffd6b77b410 } -0x5642428ee320->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x5642428ee320, 0x7ffd6b77b410 } -0x5642428ee300->Object::~Object { 0x5642428ee2e0, 0x5642428ee300, 0x7ffd6b77b410 } -0x5642428ee2e0->Object::~Object { 0x5642428ee2e0, 0x7ffd6b77b410 } -0x7ffd6b77b410->Object::~Object { 0x7ffd6b77b410 } +0x7ffea225cdef->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340 } +0x7ffea225ceb0->Object::Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225cdef } +0x7ffea225cdef->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225cdef, 0x7ffea225ceb0 } +Next token is token 'p' (0x7ffea225ceb0 'p'Exception caught: cleaning lookahead and stack +0x5610732ff340->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x5610732ff340, 0x7ffea225ceb0 } +0x5610732ff320->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x5610732ff320, 0x7ffea225ceb0 } +0x5610732ff300->Object::~Object { 0x5610732ff2e0, 0x5610732ff300, 0x7ffea225ceb0 } +0x5610732ff2e0->Object::~Object { 0x5610732ff2e0, 0x7ffea225ceb0 } +0x7ffea225ceb0->Object::~Object { 0x7ffea225ceb0 } exception caught: printer end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: grep '^exception caught: printer$' stderr stdout: +stderr: +stderr: exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaaT +stdout: +./glr-regression.at:672: $PREPARSER ./glr-regr5 +stderr: ./c++.at:1363: $PREPARSER 
./input aaaae +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:672: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1065: $PREPARSER ./input < in stderr: -exception caught: syntax error stderr: +756. glr-regression.at:1678: testing Incorrect lookahead during nondeterministic GLR: glr.c ... +./glr-regression.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.c glr-regr14.y +./glr-regression.at:1447: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR -736. glr-regression.at:844: testing Duplicated user destructor for lookahead: glr.cc ... stderr: -./glr-regression.at:844: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y +./c++.at:1065: $PREPARSER ./input < in ./c++.at:1363: $PREPARSER ./input aaaaE -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: +./c++.at:92: $PREPARSER ./input +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +731. 
glr-regression.at:672: ok +./c++.at:1065: $PREPARSER ./input < in +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR +stderr: +./glr-regression.at:1678: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr14 glr-regr14.c $LIBS ======== Testing with C++ standard flags: '' -stdout: -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stdout: -./glr-regression.at:593: $PREPARSER ./glr-regr4 +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in + stderr: -./glr-regression.at:670: $PREPARSER ./glr-regr5 +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1064: $PREPARSER ./input < in stderr: stderr: -./c++.at:1363: $PREPARSER ./input aaaaT +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +757. glr-regression.at:1679: testing Incorrect lookahead during nondeterministic GLR: glr.cc ... +./glr-regression.at:740: $PREPARSER ./glr-regr6 +./glr-regression.at:1679: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y +stderr: Ambiguity detected. Option 1, start -> @@ -263990,86 +263171,150 @@ 'a' syntax is ambiguous -./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:571: $here/modern -./glr-regression.at:593: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +./glr-regression.at:740: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Legac++ -./c++.at:571: $PREPARSER ./list -729. glr-regression.at:670: ok -724. glr-regression.at:593: ok -./c++.at:1363: $PREPARSER ./input aaaaR +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +734. 
glr-regression.at:740: ok +./c++.at:1555: $PREPARSER ./test stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./glr-regression.at:844: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS -737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ... -./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y -738. glr-regression.at:944: testing Incorrectly initialized location for empty right-hand side in GLR: glr.c ... -./glr-regression.at:944: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.c glr-regr8.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./glr-regression.at:845: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS -./glr-regression.at:944: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr8 glr-regr8.c $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./glr-regression.at:1103: $PREPARSER ./glr-regr10 +stderr: +./glr-regression.at:1103: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1679: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS +745. glr-regression.at:1103: ok +758. glr-regression.at:1680: testing Incorrect lookahead during nondeterministic GLR: glr2.cc ... +./glr-regression.at:1680: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y + +stderr: stderr: -./glr-regression.at:738: $PREPARSER ./glr-regr6 +stdout: +./glr-regression.at:1310: $PREPARSER ./glr-regr12 stdout: stderr: -Ambiguity detected. -Option 1, - start -> - 'a' +./c++.at:1555: ./check +./glr-regression.at:1310: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +750. glr-regression.at:1310: ok -Option 2, - start -> - 'a' +stderr: +stdout: +759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ... 
+./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./glr-regression.at:1680: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS +760. glr-regression.at:1786: testing Leaked semantic values when reporting ambiguity: glr.cc ... +./glr-regression.at:1786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y +./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./glr-regression.at:1786: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS +stderr: +stdout: +./glr-regression.at:845: $PREPARSER ./glr-regr7 +stderr: +stdout: +./glr-regression.at:1445: $PREPARSER ./glr-regr13 +stderr: +stderr: +stderr: +memory exhausted +./glr-regression.at:845: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./glr-regression.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./glr-regression.at:1175: $PREPARSER ./glr-regr11 +stderr: +stdout: +./c++.at:659: $PREPARSER ./input +stderr: +stdout: +./c++.at:566: $here/modern +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stdout: +stderr: +Legac++ +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:566: $PREPARSER ./list +753. glr-regression.at:1445: ok +stderr: +737. glr-regression.at:845: ok +./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax is ambiguous -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -732. glr-regression.at:738: ok stderr: +stdout: +./c++.at:235: $PREPARSER ./list +762. glr-regression.at:1860: testing Leaked lookahead after nondeterministic parse syntax error: glr.c ... +761. glr-regression.at:1787: testing Leaked semantic values when reporting ambiguity: glr2.cc ... 
+./glr-regression.at:1860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.c glr-regr16.y +./glr-regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y stderr: stdout: +stderr: stdout: -./c++.at:568: $here/modern ======== Testing with C++ standard flags: '' ./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1860: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr16 glr-regr16.c $LIBS +stderr: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: -Legac++ -./c++.at:568: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -264092,80 +263337,147 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -739. glr-regression.at:945: testing Incorrectly initialized location for empty right-hand side in GLR: glr.cc ... -./glr-regression.at:945: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:849: $PREPARSER ./input +748. glr-regression.at:1175: ok +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./glr-regression.at:945: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS +./c++.at:855: $PREPARSER ./input + +./glr-regression.at:1787: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./glr-regression.at:207: $PREPARSER ./glr-regr1 BPBPB +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: stderr: -./glr-regression.at:207: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -716. glr-regression.at:207: ok -./c++.at:235: $PREPARSER ./list stderr: +stdout: +stdout: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1311: $PREPARSER ./glr-regr12 +./glr-regression.at:1678: $PREPARSER ./glr-regr14 +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +./c++.at:1360: $PREPARSER ./input aaaas stderr: -======== Testing with C++ standard flags: '' -./glr-regression.at:843: $PREPARSER ./glr-regr7 -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS - stdout: +./existing.at:1460: $PREPARSER ./input +763. glr-regression.at:1861: testing Leaked lookahead after nondeterministic parse syntax error: glr.cc ... 
+./glr-regression.at:1861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y stderr: -memory exhausted -./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ./c++.at:941: $PREPARSER ./input +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./glr-regression.at:1861: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' stderr: -735. glr-regression.at:843: syntax error +stderr: +stderr: +======== Testing with C++ standard flags: '' +stderr: +exception caught: reduction +./glr-regression.at:1311: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +syntax error Discarding 'a'. Reducing 'a'. ./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1678: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +751. glr-regression.at:1311: stderr: ok +stdout: + +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input aaaas +764. glr-regression.at:1862: testing Leaked lookahead after nondeterministic parse syntax error: glr2.cc ... +./glr-regression.at:1862: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +756. glr-regression.at:1678: ok +./c++.at:1360: $PREPARSER ./input aaaal +624. existing.at:1460: ok ======== Testing with C++ standard flags: '' ./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +exception caught: reduction stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in + stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ... 
-error: invalid expression -./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +./glr-regression.at:1785: $PREPARSER ./glr-regr15 +./glr-regression.at:1862: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS +./c++.at:1361: $PREPARSER ./input aaaal +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +766. glr-regression.at:1965: testing Uninitialized location when reporting ambiguity: glr.cc ... +765. glr-regression.at:1964: testing Uninitialized location when reporting ambiguity: glr.c api.pure ... +./glr-regression.at:1964: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.c glr-regr17.y +./glr-regression.at:1965: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y +Ambiguity detected. +Option 1, + ambiguity -> + ambiguity1 -> + +Option 2, + ambiguity -> + ambiguity2 -> + +syntax is ambiguous +./glr-regression.at:1785: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +./c++.at:567: $here/modern +stdout: +./c++.at:92: $PREPARSER ./input +stderr: +stdout: +./c++.at:568: $here/modern ./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ... -./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS -./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: +exception caught: yylex +./glr-regression.at:1965: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1964: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr17 glr-regr17.c $LIBS stdout: -./c++.at:569: $here/modern +./c++.at:1360: $PREPARSER ./input i +stderr: +759. 
glr-regression.at:1785: ok stdout: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Legac++ -./c++.at:569: $PREPARSER ./list +Legac++ +./c++.at:567: $PREPARSER ./list +./c++.at:568: $PREPARSER ./list + +stderr: +stderr: +stdout: +stdout: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +exception caught: initial-action stderr: Destroy: "0" Destroy: "0" @@ -264188,34 +263500,6 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./glr-regression.at:739: $PREPARSER ./glr-regr6 -stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:739: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -733. glr-regression.at:739: ok -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS - -stderr: -stdout: -./c++.at:574: $here/modern -stdout: -Legac++ -./c++.at:574: $PREPARSER ./list -stderr: Destroy: "0" Destroy: "0" Destroy: 1 @@ -264237,831 +263521,1059 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input i +767. glr-regression.at:1966: testing Uninitialized location when reporting ambiguity: glr2.cc ... +./glr-regression.at:1966: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ... 
-./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: stderr: stderr: stderr: stdout: stdout: stdout: -./glr-regression.at:597: $PREPARSER ./glr-regr4 -./c++.at:659: $PREPARSER ./input -./c++.at:1066: $PREPARSER ./input < in -stderr: -stderr: -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./glr-regression.at:1037: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:1066: $PREPARSER ./input < in -727. glr-regression.at:597: ok -stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in +stdout: +./glr-regression.at:1038: $PREPARSER ./glr-regr9 +./c++.at:570: $here/modern +./glr-regression.at:1446: $PREPARSER ./glr-regr13 +./glr-regression.at:1679: $PREPARSER ./glr-regr14 stderr: - -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap +exception caught: initial-action ======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -743. glr-regression.at:1038: testing No users destructors if stack 0 deleted: glr2.cc ... 
-./glr-regression.at:1038: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:1066: ./check -./glr-regression.at:1038: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaas stderr: stdout: -./glr-regression.at:944: $PREPARSER ./glr-regr8 +./glr-regression.at:946: $PREPARSER ./glr-regr8 +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -738. glr-regression.at:944: ok - stderr: -stdout: -./c++.at:1555: $PREPARSER ./test stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -744. glr-regression.at:1102: testing Corrupted semantic options if user action cuts parse: glr.c ... -./glr-regression.at:1102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.c glr-regr10.y -./glr-regression.at:1102: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr10 glr-regr10.c $LIBS stderr: -stdout: -./glr-regression.at:671: $PREPARSER ./glr-regr5 +exception caught: reduction stderr: stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +./glr-regression.at:1038: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -730. 
glr-regression.at:671: ok +./glr-regression.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Legac++ +./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: $PREPARSER ./list +./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:1360: $PREPARSER ./input aaaas - stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal +stdout: +./glr-regression.at:1860: $PREPARSER ./glr-regr16 +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./glr-regression.at:1966: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./c++.at:1360: $PREPARSER ./input i +./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaap +743. glr-regression.at:1038: ok stderr: +740. glr-regression.at:946: 754. glr-regression.at:1446: ok + ok +757. glr-regression.at:1679: syntax error stdout: + ok stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:594: $PREPARSER ./glr-regr4 -745. glr-regression.at:1103: testing Corrupted semantic options if user action cuts parse: glr.cc ... -./c++.at:1360: $PREPARSER ./input --debug aaaap -stderr: +./glr-regression.at:1860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1361: $PREPARSER ./input aaaap +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:573: $here/modern stderr: stdout: -./glr-regression.at:1103: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y + + + + +768. glr-regression.at:2035: testing Missed %merge type warnings when LHS type is declared later: glr.c ... +./glr-regression.at:2035: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +771. glr-regression.at:2149: testing Ambiguity reports: glr.c ... +./glr-regression.at:2149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +768. glr-regression.at:2035: ok + +770. glr-regression.at:2037: testing Missed %merge type warnings when LHS type is declared later: glr2.cc ... +769. glr-regression.at:2036: testing Missed %merge type warnings when LHS type is declared later: glr.cc ... 
+./glr-regression.at:2036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +./glr-regression.at:2037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +770. glr-regression.at:2037: 769. glr-regression.at:2036: ok + ok + + +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./glr-regression.at:2149: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./c++.at:1065: $PREPARSER ./input < in +./c++.at:1064: $PREPARSER ./input < in +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: stderr: +Legac++ +./c++.at:573: $PREPARSER ./list +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -0x5640c8401f40->Object::Object { } -Next token is token 'a' (0x5640c8401f40 'a') -Shifting token 'a' (0x5640c8401f40 'a') +0x564a43fc3f40->Object::Object { } +Next token is token 'a' (0x564a43fc3f40 'a') +Shifting token 'a' (0x564a43fc3f40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5640c8401f40 'a') --> $$ = nterm item (0x5640c8401f40 'a') + $1 = token 'a' (0x564a43fc3f40 'a') +-> $$ = nterm item (0x564a43fc3f40 'a') Entering state 11 Stack now 0 11 Reading a token -0x5640c8401f90->Object::Object { 0x5640c8401f40 } -Next token is token 'a' (0x5640c8401f90 'a') -Shifting token 'a' (0x5640c8401f90 'a') +0x564a43fc3f90->Object::Object { 0x564a43fc3f40 } +Next token is token 'a' (0x564a43fc3f90 'a') +Shifting token 'a' (0x564a43fc3f90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5640c8401f90 'a') --> $$ = nterm item (0x5640c8401f90 'a') + $1 = token 'a' (0x564a43fc3f90 'a') +-> $$ = nterm item (0x564a43fc3f90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x5640c8401fe0->Object::Object { 0x5640c8401f40, 0x5640c8401f90 } -Next token is token 'a' (0x5640c8401fe0 'a') -Shifting token 'a' (0x5640c8401fe0 'a') +0x564a43fc3fe0->Object::Object { 0x564a43fc3f40, 0x564a43fc3f90 } +Next token is token 'a' (0x564a43fc3fe0 'a') +Shifting token 'a' (0x564a43fc3fe0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5640c8401fe0 'a') --> $$ = nterm item (0x5640c8401fe0 'a') + $1 = token 'a' (0x564a43fc3fe0 'a') +-> $$ = nterm item (0x564a43fc3fe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x5640c8402030->Object::Object { 0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0 } -Next token is token 'a' (0x5640c8402030 'a') -Shifting token 'a' (0x5640c8402030 'a') +0x564a43fc4030->Object::Object { 0x564a43fc3f40, 0x564a43fc3f90, 0x564a43fc3fe0 } +Next token is token 'a' (0x564a43fc4030 'a') +Shifting token 'a' (0x564a43fc4030 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5640c8402030 'a') --> $$ = nterm item (0x5640c8402030 'a') + $1 = token 'a' (0x564a43fc4030 'a') +-> $$ = nterm item (0x564a43fc4030 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x5640c8402080->Object::Object { 
0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0, 0x5640c8402030 } -Next token is token 'p' (0x5640c8402080 'p'Exception caught: cleaning lookahead and stack -0x5640c8402080->Object::~Object { 0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0, 0x5640c8402030, 0x5640c8402080 } -0x5640c8402030->Object::~Object { 0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0, 0x5640c8402030 } -0x5640c8401fe0->Object::~Object { 0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0 } -0x5640c8401f90->Object::~Object { 0x5640c8401f40, 0x5640c8401f90 } -0x5640c8401f40->Object::~Object { 0x5640c8401f40 } +0x564a43fc4080->Object::Object { 0x564a43fc3f40, 0x564a43fc3f90, 0x564a43fc3fe0, 0x564a43fc4030 } +Next token is token 'p' (0x564a43fc4080 'p'Exception caught: cleaning lookahead and stack +0x564a43fc4080->Object::~Object { 0x564a43fc3f40, 0x564a43fc3f90, 0x564a43fc3fe0, 0x564a43fc4030, 0x564a43fc4080 } +0x564a43fc4030->Object::~Object { 0x564a43fc3f40, 0x564a43fc3f90, 0x564a43fc3fe0, 0x564a43fc4030 } +0x564a43fc3fe0->Object::~Object { 0x564a43fc3f40, 0x564a43fc3f90, 0x564a43fc3fe0 } +0x564a43fc3f90->Object::~Object { 0x564a43fc3f40, 0x564a43fc3f90 } +0x564a43fc3f40->Object::~Object { 0x564a43fc3f40 } exception caught: printer end { } +./c++.at:1362: $PREPARSER ./input aaaal +772. glr-regression.at:2150: testing Ambiguity reports: glr.cc ... +./glr-regression.at:2150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +762. glr-regression.at:1860: ok +stderr: +stdout: +./glr-regression.at:1104: $PREPARSER ./glr-regr10 + ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:594: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./glr-regression.at:1036: $PREPARSER ./glr-regr9 +stdout: stderr: -memory exhausted -./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./glr-regression.at:1104: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +stderr: +stderr: +stdout: +stderr: +stderr: +stdout: +773. glr-regression.at:2151: testing Ambiguity reports: glr2.cc ... 
+======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stdout: Starting parse Entering state 0 Stack now 0 Reading a token -0x5640c8401f40->Object::Object { } -Next token is token 'a' (0x5640c8401f40 'a') -Shifting token 'a' (0x5640c8401f40 'a') +0x564a43fc3f40->Object::Object { } +Next token is token 'a' (0x564a43fc3f40 'a') +Shifting token 'a' (0x564a43fc3f40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5640c8401f40 'a') --> $$ = nterm item (0x5640c8401f40 'a') + $1 = token 'a' (0x564a43fc3f40 'a') +-> $$ = nterm item (0x564a43fc3f40 'a') Entering state 11 Stack now 0 11 Reading a token -0x5640c8401f90->Object::Object { 0x5640c8401f40 } -Next token is token 'a' (0x5640c8401f90 'a') -Shifting token 'a' (0x5640c8401f90 'a') +0x564a43fc3f90->Object::Object { 0x564a43fc3f40 } +Next token is token 'a' (0x564a43fc3f90 'a') +Shifting token 'a' (0x564a43fc3f90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5640c8401f90 'a') --> $$ = nterm item (0x5640c8401f90 'a') + $1 = token 'a' (0x564a43fc3f90 'a') +-> $$ = nterm item (0x564a43fc3f90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x5640c8401fe0->Object::Object { 0x5640c8401f40, 0x5640c8401f90 } -Next token is token 'a' (0x5640c8401fe0 'a') -Shifting token 'a' (0x5640c8401fe0 'a') +0x564a43fc3fe0->Object::Object { 0x564a43fc3f40, 0x564a43fc3f90 } +Next token is token 'a' (0x564a43fc3fe0 'a') +Shifting token 'a' (0x564a43fc3fe0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5640c8401fe0 'a') --> $$ = nterm item (0x5640c8401fe0 'a') + $1 = token 'a' (0x564a43fc3fe0 'a') +-> $$ = nterm item (0x564a43fc3fe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x5640c8402030->Object::Object { 0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0 } -Next token is token 'a' (0x5640c8402030 'a') -Shifting token 'a' (0x5640c8402030 'a') +0x564a43fc4030->Object::Object { 0x564a43fc3f40, 0x564a43fc3f90, 0x564a43fc3fe0 } +Next token is token 'a' (0x564a43fc4030 'a') +Shifting token 'a' (0x564a43fc4030 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5640c8402030 'a') --> $$ = nterm item (0x5640c8402030 'a') + $1 = token 'a' (0x564a43fc4030 'a') +-> $$ = nterm item (0x564a43fc4030 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x5640c8402080->Object::Object { 0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0, 0x5640c8402030 } -Next token is token 'p' (0x5640c8402080 'p'Exception caught: cleaning lookahead and stack -0x5640c8402080->Object::~Object { 0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0, 0x5640c8402030, 0x5640c8402080 } -0x5640c8402030->Object::~Object { 0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0, 0x5640c8402030 } -0x5640c8401fe0->Object::~Object { 0x5640c8401f40, 0x5640c8401f90, 0x5640c8401fe0 } -0x5640c8401f90->Object::~Object { 0x5640c8401f40, 0x5640c8401f90 } -0x5640c8401f40->Object::~Object { 0x5640c8401f40 } +0x564a43fc4080->Object::Object { 0x564a43fc3f40, 0x564a43fc3f90, 0x564a43fc3fe0, 0x564a43fc4030 } +Next token is token 'p' (0x564a43fc4080 'p'Exception caught: cleaning lookahead and stack +0x564a43fc4080->Object::~Object { 0x564a43fc3f40, 0x564a43fc3f90, 0x564a43fc3fe0, 0x564a43fc4030, 0x564a43fc4080 } +0x564a43fc4030->Object::~Object { 0x564a43fc3f40, 0x564a43fc3f90, 
0x564a43fc3fe0, 0x564a43fc4030 } +0x564a43fc3fe0->Object::~Object { 0x564a43fc3f40, 0x564a43fc3f90, 0x564a43fc3fe0 } +0x564a43fc3f90->Object::~Object { 0x564a43fc3f40, 0x564a43fc3f90 } +0x564a43fc3f40->Object::~Object { 0x564a43fc3f40 } exception caught: printer end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -725. glr-regression.at:594: ok -stdout: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -741. glr-regression.at:1036: ok -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./glr-regression.at:1103: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -746. glr-regression.at:1104: testing Corrupted semantic options if user action cuts parse: glr2.cc ... -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:1104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y -stderr: -stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: +./glr-regression.at:2151: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y error: invalid expression caught error error: invalid character caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -747. glr-regression.at:1174: testing Undesirable destructors if user action cuts parse: glr.c ... 
+./c++.at:1555: $PREPARSER ./test error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1174: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.c glr-regr11.y -./c++.at:1064: $PREPARSER ./input < in -stderr: +caught error error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./glr-regression.at:1104: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./glr-regression.at:1174: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr11 glr-regr11.c $LIBS -stderr: -stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stderr: -stdout: -stdout: -./c++.at:1361: $PREPARSER ./input aaaas +caught error +./c++.at:1363: $PREPARSER ./input aaaas ======== Testing with C++ standard flags: '' -stderr: -stdout: +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:2150: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1360: grep '^exception caught: printer$' stderr stderr: -exception caught: reduction -./glr-regression.at:1102: $PREPARSER ./glr-regr10 -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal -744. 
glr-regression.at:1102: ok -stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i - -stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: +exception caught: reduction Starting parse Entering state 0 Stack now 0 Reading a token -0x558f9226ff40->Object::Object { } -Next token is token 'a' (0x558f9226ff40 'a') -Shifting token 'a' (0x558f9226ff40 'a') +0x5605f2aacf40->Object::Object { } +Next token is token 'a' (0x5605f2aacf40 'a') +Shifting token 'a' (0x5605f2aacf40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x558f9226ff40 'a') --> $$ = nterm item (0x558f9226ff40 'a') + $1 = token 'a' (0x5605f2aacf40 'a') +-> $$ = nterm item (0x5605f2aacf40 'a') Entering state 10 Stack now 0 10 Reading a token -0x558f9226ff90->Object::Object { 0x558f9226ff40 } -Next token is token 'a' (0x558f9226ff90 'a') -Shifting token 'a' (0x558f9226ff90 'a') +0x5605f2aacf90->Object::Object { 0x5605f2aacf40 } +Next token is token 'a' (0x5605f2aacf90 'a') +Shifting token 'a' (0x5605f2aacf90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x558f9226ff90 'a') --> $$ = nterm item (0x558f9226ff90 'a') + $1 = token 'a' (0x5605f2aacf90 'a') +-> $$ = nterm item (0x5605f2aacf90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x558f9226ffe0->Object::Object { 0x558f9226ff40, 0x558f9226ff90 } -Next token is token 'a' (0x558f9226ffe0 'a') -Shifting token 'a' (0x558f9226ffe0 'a') +0x5605f2aacfe0->Object::Object { 0x5605f2aacf40, 0x5605f2aacf90 } +Next token is token 'a' (0x5605f2aacfe0 'a') +Shifting token 'a' (0x5605f2aacfe0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x558f9226ffe0 'a') --> $$ = nterm item (0x558f9226ffe0 'a') + $1 = token 'a' (0x5605f2aacfe0 'a') +-> $$ = nterm item (0x5605f2aacfe0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x558f92270030->Object::Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0 } -Next token is token 'a' (0x558f92270030 'a') -Shifting token 'a' (0x558f92270030 'a') +0x5605f2aad030->Object::Object { 0x5605f2aacf40, 0x5605f2aacf90, 0x5605f2aacfe0 } +Next token is token 'a' (0x5605f2aad030 'a') +Shifting token 'a' (0x5605f2aad030 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x558f92270030 'a') --> $$ = nterm item (0x558f92270030 'a') + $1 = token 'a' (0x5605f2aad030 'a') +-> $$ = nterm item (0x5605f2aad030 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x558f92270080->Object::Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0, 0x558f92270030 } -Next token is token 'p' (0x558f92270080 'p'Exception caught: cleaning lookahead and stack -0x558f92270080->Object::~Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0, 0x558f92270030, 0x558f92270080 } -0x558f92270030->Object::~Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0, 0x558f92270030 } -0x558f9226ffe0->Object::~Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0 } -0x558f9226ff90->Object::~Object { 0x558f9226ff40, 0x558f9226ff90 } -0x558f9226ff40->Object::~Object { 0x558f9226ff40 } +0x5605f2aad080->Object::Object { 0x5605f2aacf40, 
0x5605f2aacf90, 0x5605f2aacfe0, 0x5605f2aad030 } +Next token is token 'p' (0x5605f2aad080 'p'Exception caught: cleaning lookahead and stack +0x5605f2aad080->Object::~Object { 0x5605f2aacf40, 0x5605f2aacf90, 0x5605f2aacfe0, 0x5605f2aad030, 0x5605f2aad080 } +0x5605f2aad030->Object::~Object { 0x5605f2aacf40, 0x5605f2aacf90, 0x5605f2aacfe0, 0x5605f2aad030 } +0x5605f2aacfe0->Object::~Object { 0x5605f2aacf40, 0x5605f2aacf90, 0x5605f2aacfe0 } +0x5605f2aacf90->Object::~Object { 0x5605f2aacf40, 0x5605f2aacf90 } +0x5605f2aacf40->Object::~Object { 0x5605f2aacf40 } exception caught: printer end { } +stderr: +stdout: +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1786: $PREPARSER ./glr-regr15 +stdout: +./c++.at:571: $here/modern +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +774. glr-regression.at:2229: testing Predicates: glr.c ... +./glr-regression.at:2229: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +./c++.at:569: $here/modern +775. glr-regression.at:2230: testing Predicates: glr.cc ... +./glr-regression.at:2230: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1064: $PREPARSER ./input < in +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +stdout: +======== Testing with C++ standard flags: '' +./glr-regression.at:1176: $PREPARSER ./glr-regr11 +stderr: +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae +stdout: +stderr: +stdout: +stderr: +746. glr-regression.at:1104: Ambiguity detected. 
+Option 1, + ambiguity -> + ambiguity1 -> + +Option 2, + ambiguity -> + ambiguity2 -> + +syntax is ambiguous +Legac++ + ok +Legac++ +./c++.at:571: $PREPARSER ./list +./glr-regression.at:1786: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +exception caught: initial-action +./c++.at:569: $PREPARSER ./list + +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2230: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stderr: +stderr: +exception caught: syntax error Starting parse Entering state 0 Stack now 0 Reading a token -0x558f9226ff40->Object::Object { } -Next token is token 'a' (0x558f9226ff40 'a') -Shifting token 'a' (0x558f9226ff40 'a') +0x5605f2aacf40->Object::Object { } +Next token is token 'a' (0x5605f2aacf40 'a') +Shifting token 'a' (0x5605f2aacf40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x558f9226ff40 'a') --> $$ = nterm item (0x558f9226ff40 'a') + $1 = token 'a' (0x5605f2aacf40 'a') +-> $$ = nterm item (0x5605f2aacf40 'a') Entering state 10 Stack now 0 10 Reading a token -0x558f9226ff90->Object::Object { 0x558f9226ff40 } -Next token is token 'a' (0x558f9226ff90 'a') -Shifting token 'a' (0x558f9226ff90 'a') +0x5605f2aacf90->Object::Object { 0x5605f2aacf40 } +Next token is token 'a' (0x5605f2aacf90 'a') +Shifting token 'a' (0x5605f2aacf90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x558f9226ff90 'a') --> $$ = nterm item (0x558f9226ff90 'a') + $1 = token 'a' (0x5605f2aacf90 'a') +-> $$ = nterm item (0x5605f2aacf90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x558f9226ffe0->Object::Object { 0x558f9226ff40, 0x558f9226ff90 } -Next token is token 'a' (0x558f9226ffe0 'a') -Shifting token 'a' (0x558f9226ffe0 'a') +0x5605f2aacfe0->Object::Object { 0x5605f2aacf40, 0x5605f2aacf90 } +Next token is token 'a' (0x5605f2aacfe0 'a') +Shifting token 'a' (0x5605f2aacfe0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x558f9226ffe0 'a') --> $$ = nterm item (0x558f9226ffe0 'a') + $1 = token 'a' (0x5605f2aacfe0 'a') +-> $$ = nterm item (0x5605f2aacfe0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x558f92270030->Object::Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0 } -Next token is token 'a' (0x558f92270030 'a') -Shifting token 'a' (0x558f92270030 'a') +0x5605f2aad030->Object::Object { 0x5605f2aacf40, 0x5605f2aacf90, 0x5605f2aacfe0 } +Next token is token 'a' (0x5605f2aad030 'a') +Shifting token 'a' (0x5605f2aad030 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x558f92270030 'a') --> $$ = nterm item (0x558f92270030 'a') + $1 = token 'a' (0x5605f2aad030 'a') +-> $$ = nterm item (0x5605f2aad030 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x558f92270080->Object::Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0, 0x558f92270030 } -Next token is token 'p' (0x558f92270080 'p'Exception caught: cleaning lookahead and stack 
-0x558f92270080->Object::~Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0, 0x558f92270030, 0x558f92270080 } -0x558f92270030->Object::~Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0, 0x558f92270030 } -0x558f9226ffe0->Object::~Object { 0x558f9226ff40, 0x558f9226ff90, 0x558f9226ffe0 } -0x558f9226ff90->Object::~Object { 0x558f9226ff40, 0x558f9226ff90 } -0x558f9226ff40->Object::~Object { 0x558f9226ff40 } +0x5605f2aad080->Object::Object { 0x5605f2aacf40, 0x5605f2aacf90, 0x5605f2aacfe0, 0x5605f2aad030 } +Next token is token 'p' (0x5605f2aad080 'p'Exception caught: cleaning lookahead and stack +0x5605f2aad080->Object::~Object { 0x5605f2aacf40, 0x5605f2aacf90, 0x5605f2aacfe0, 0x5605f2aad030, 0x5605f2aad080 } +0x5605f2aad030->Object::~Object { 0x5605f2aacf40, 0x5605f2aacf90, 0x5605f2aacfe0, 0x5605f2aad030 } +0x5605f2aacfe0->Object::~Object { 0x5605f2aacf40, 0x5605f2aacf90, 0x5605f2aacfe0 } +0x5605f2aacf90->Object::~Object { 0x5605f2aacf40, 0x5605f2aacf90 } +0x5605f2aacf40->Object::~Object { 0x5605f2aacf40 } exception caught: printer end { } +./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1363: $PREPARSER ./input aaaal +error: invalid expression +./c++.at:1065: $PREPARSER ./input < in +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: grep '^exception caught: printer$' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./glr-regression.at:2151: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +stderr: +stdout: +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:1360: $PREPARSER ./input aaaaE +stderr: exception caught: printer +776. glr-regression.at:2231: testing Predicates: glr2.cc ... +error: invalid expression +======== Testing with C++ standard flags: '' +./c++.at:855: $PREPARSER ./input ./c++.at:1361: $PREPARSER ./input aaaae +./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +749. glr-regression.at:1176: ./c++.at:1362: $PREPARSER ./input aaaap + ok stderr: -exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE -748. glr-regression.at:1175: testing Undesirable destructors if user action cuts parse: glr.cc ... 
-./glr-regression.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +760. glr-regression.at:1786: ok +exception caught: yylex stderr: stdout: -./c++.at:1361: $PREPARSER ./input aaaaT -./glr-regression.at:598: $PREPARSER ./glr-regr4 +./c++.at:572: $here/modern stderr: +stdout: +./glr-regression.at:1964: $PREPARSER ./glr-regr17 +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./c++.at:1064: $PREPARSER ./input < in +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:598: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -728. glr-regression.at:598: ok -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ======== Testing with C++ standard flags: '' -./glr-regression.at:1175: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - -749. glr-regression.at:1176: testing Undesirable destructors if user action cuts parse: glr2.cc ... -./glr-regression.at:1176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y +stderr: +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +Legac++ stderr: stdout: -./c++.at:941: $PREPARSER ./input +./c++.at:572: $PREPARSER ./list stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +exception caught: syntax error, unexpected end of file, expecting 'a' +stderr: +./c++.at:1363: $PREPARSER ./input i +stderr: +error: invalid character ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +Ambiguity detected. 
+Option 1, + start -> + ambig1 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty1 -> + +Option 2, + start -> + ambig2 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty2 -> + +1.1-2.2: syntax is ambiguous +./glr-regression.at:1312: $PREPARSER ./glr-regr12 +./glr-regression.at:1964: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -./glr-regression.at:1176: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS stdout: -./c++.at:1362: $PREPARSER ./input aaaas +stdout: +./c++.at:849: $PREPARSER ./input +./glr-regression.at:1861: $PREPARSER ./glr-regr16 +./c++.at:1066: $PREPARSER ./input < in +./c++.at:1065: $PREPARSER ./input < in +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal stderr: -stdout: stderr: -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1362: $PREPARSER ./input i stderr: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1362: $PREPARSER ./input --debug aaaap +stderr: +stderr: +error: invalid expression +caught error +error: invalid character +caught error exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap +======== Testing with C++ standard flags: '' stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap +./c++.at:1361: $PREPARSER ./input aaaaE +syntax error +./glr-regression.at:1861: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid character +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +765. 
glr-regression.at:1964: ok stderr: +./c++.at:1360: $PREPARSER ./input aaaaT Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffd53b7c93f->Object::Object { } -0x7ffd53b7ca00->Object::Object { 0x7ffd53b7c93f } -0x7ffd53b7c93f->Object::~Object { 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next token is token 'a' (0x7ffd53b7ca00 'a') -0x7ffd53b7c970->Object::Object { 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::Object { 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::~Object { 0x7ffd53b7c927, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x7ffd53b7c970, 0x7ffd53b7ca00 } -Shifting token 'a' (0x7ffd53b7c970 'a') -0x5634675692e0->Object::Object { 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::Object { 0x5634675692e0, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::~Object { 0x5634675692e0, 0x7ffd53b7c8ff, 0x7ffd53b7c970 } -0x7ffd53b7c970->Object::~Object { 0x5634675692e0, 0x7ffd53b7c970 } +0x7fff76085caf->Object::Object { } +0x7fff76085d70->Object::Object { 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'a' (0x7fff76085d70 'a') +0x7fff76085ce0->Object::Object { 0x7fff76085d70 } +0x7fff76085c97->Object::Object { 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085c97->Object::~Object { 0x7fff76085c97, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x7fff76085ce0, 0x7fff76085d70 } +Shifting token 'a' (0x7fff76085ce0 'a') +0x55e6d3fda2e0->Object::Object { 0x7fff76085ce0 } +0x7fff76085c6f->Object::Object { 0x55e6d3fda2e0, 0x7fff76085ce0 } +0x7fff76085c6f->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085c6f, 0x7fff76085ce0 } +0x7fff76085ce0->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085ce0 } Entering state 2 Stack now 0 2 -0x7ffd53b7ca20->Object::Object { 0x5634675692e0 } +0x7fff76085d90->Object::Object { 0x55e6d3fda2e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5634675692e0 'a') --> $$ = nterm item (0x7ffd53b7ca20 'a') -0x5634675692e0->Object::~Object { 0x5634675692e0, 0x7ffd53b7ca20 } -0x5634675692e0->Object::Object { 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::Object { 0x5634675692e0, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::~Object { 0x5634675692e0, 0x7ffd53b7c9d8, 0x7ffd53b7ca20 } -0x7ffd53b7ca20->Object::~Object { 0x5634675692e0, 0x7ffd53b7ca20 } + $1 = token 'a' (0x55e6d3fda2e0 'a') +-> $$ = nterm item (0x7fff76085d90 'a') +0x55e6d3fda2e0->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085d90 } +0x55e6d3fda2e0->Object::Object { 0x7fff76085d90 } +0x7fff76085d48->Object::Object { 0x55e6d3fda2e0, 0x7fff76085d90 } +0x7fff76085d48->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085d48, 0x7fff76085d90 } +0x7fff76085d90->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085d90 } Entering state 11 Stack now 0 11 Reading a token -0x7ffd53b7c93f->Object::Object { 0x5634675692e0 } -0x7ffd53b7ca00->Object::Object { 0x5634675692e0, 0x7ffd53b7c93f } -0x7ffd53b7c93f->Object::~Object { 0x5634675692e0, 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next token is token 'a' (0x7ffd53b7ca00 'a') -0x7ffd53b7c970->Object::Object { 0x5634675692e0, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::Object { 0x5634675692e0, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::~Object { 0x5634675692e0, 0x7ffd53b7c927, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x5634675692e0, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -Shifting token 'a' (0x7ffd53b7c970 'a') -0x563467569300->Object::Object { 0x5634675692e0, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970 } 
-0x7ffd53b7c8ff->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c8ff, 0x7ffd53b7c970 } -0x7ffd53b7c970->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970 } +0x7fff76085caf->Object::Object { 0x55e6d3fda2e0 } +0x7fff76085d70->Object::Object { 0x55e6d3fda2e0, 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'a' (0x7fff76085d70 'a') +0x7fff76085ce0->Object::Object { 0x55e6d3fda2e0, 0x7fff76085d70 } +0x7fff76085c97->Object::Object { 0x55e6d3fda2e0, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085c97->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085c97, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085ce0, 0x7fff76085d70 } +Shifting token 'a' (0x7fff76085ce0 'a') +0x55e6d3fda300->Object::Object { 0x55e6d3fda2e0, 0x7fff76085ce0 } +0x7fff76085c6f->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0 } +0x7fff76085c6f->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085c6f, 0x7fff76085ce0 } +0x7fff76085ce0->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0 } Entering state 2 Stack now 0 11 2 -0x7ffd53b7ca20->Object::Object { 0x5634675692e0, 0x563467569300 } +0x7fff76085d90->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x563467569300 'a') --> $$ = nterm item (0x7ffd53b7ca20 'a') -0x563467569300->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca20 } -0x563467569300->Object::Object { 0x5634675692e0, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c9d8, 0x7ffd53b7ca20 } -0x7ffd53b7ca20->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca20 } + $1 = token 'a' (0x55e6d3fda300 'a') +-> $$ = nterm item (0x7fff76085d90 'a') +0x55e6d3fda300->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d90 } +0x55e6d3fda300->Object::Object { 0x55e6d3fda2e0, 0x7fff76085d90 } +0x7fff76085d48->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d90 } +0x7fff76085d48->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d48, 0x7fff76085d90 } +0x7fff76085d90->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d90 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffd53b7c93f->Object::Object { 0x5634675692e0, 0x563467569300 } -0x7ffd53b7ca00->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c93f } -0x7ffd53b7c93f->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next token is token 'a' (0x7ffd53b7ca00 'a') -0x7ffd53b7c970->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c927, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -Shifting token 'a' (0x7ffd53b7c970 'a') -0x563467569320->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c8ff, 0x7ffd53b7c970 } -0x7ffd53b7c970->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970 } 
+0x7fff76085caf->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300 } +0x7fff76085d70->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'a' (0x7fff76085d70 'a') +0x7fff76085ce0->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d70 } +0x7fff76085c97->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085c97->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085c97, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0, 0x7fff76085d70 } +Shifting token 'a' (0x7fff76085ce0 'a') +0x55e6d3fda320->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0 } +0x7fff76085c6f->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0 } +0x7fff76085c6f->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085c6f, 0x7fff76085ce0 } +0x7fff76085ce0->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0 } Entering state 2 Stack now 0 11 11 2 -0x7ffd53b7ca20->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320 } +0x7fff76085d90->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x563467569320 'a') --> $$ = nterm item (0x7ffd53b7ca20 'a') -0x563467569320->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca20 } -0x563467569320->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c9d8, 0x7ffd53b7ca20 } -0x7ffd53b7ca20->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca20 } + $1 = token 'a' (0x55e6d3fda320 'a') +-> $$ = nterm item (0x7fff76085d90 'a') +0x55e6d3fda320->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d90 } +0x55e6d3fda320->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d90 } +0x7fff76085d48->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d90 } +0x7fff76085d48->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d48, 0x7fff76085d90 } +0x7fff76085d90->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d90 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffd53b7c93f->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320 } -0x7ffd53b7ca00->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c93f } -0x7ffd53b7c93f->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next token is token 'a' (0x7ffd53b7ca00 'a') -0x7ffd53b7c970->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c927, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -Shifting token 'a' (0x7ffd53b7c970 'a') -0x563467569340->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970 } 
-0x7ffd53b7c8ff->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c8ff, 0x7ffd53b7c970 } -0x7ffd53b7c970->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c970 } +0x7fff76085caf->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320 } +0x7fff76085d70->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'a' (0x7fff76085d70 'a') +0x7fff76085ce0->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d70 } +0x7fff76085c97->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085c97->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085c97, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0, 0x7fff76085d70 } +Shifting token 'a' (0x7fff76085ce0 'a') +0x55e6d3fda340->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0 } +0x7fff76085c6f->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085ce0 } +0x7fff76085c6f->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085c6f, 0x7fff76085ce0 } +0x7fff76085ce0->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085ce0 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffd53b7ca20->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340 } +0x7fff76085d90->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x563467569340 'a') --> $$ = nterm item (0x7ffd53b7ca20 'a') -0x563467569340->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7ca20 } -0x563467569340->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c9d8, 0x7ffd53b7ca20 } -0x7ffd53b7ca20->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7ca20 } + $1 = token 'a' (0x55e6d3fda340 'a') +-> $$ = nterm item (0x7fff76085d90 'a') +0x55e6d3fda340->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d90 } +0x55e6d3fda340->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d90 } +0x7fff76085d48->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d90 } +0x7fff76085d48->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d48, 0x7fff76085d90 } +0x7fff76085d90->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d90 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffd53b7c93f->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340 } -0x7ffd53b7ca00->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c93f } 
-0x7ffd53b7c93f->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next token is token 'p' (0x7ffd53b7ca00 'p'Exception caught: cleaning lookahead and stack -0x563467569340->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7ca00 } -0x563467569320->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca00 } -0x563467569300->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca00 } -0x5634675692e0->Object::~Object { 0x5634675692e0, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x7ffd53b7ca00 } +0x7fff76085caf->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340 } +0x7fff76085d70->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'p' (0x7fff76085d70 'p'Exception caught: cleaning lookahead and stack +0x55e6d3fda340->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d70 } +0x55e6d3fda320->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d70 } +0x55e6d3fda300->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d70 } +0x55e6d3fda2e0->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x7fff76085d70 } exception caught: printer end { } +stderr: +stdout: +./c++.at:659: $PREPARSER ./input +stderr: +stdout: +./c++.at:1555: ./check +stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +752. 
glr-regression.at:1312: ok +stderr: +stderr: +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +stderr: +stderr: +======== Testing with C++ standard flags: '' +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:574: $here/modern +stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:941: $PREPARSER ./input +763. glr-regression.at:1861: ok +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1361: $PREPARSER ./input aaaaT +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +Legac++ +stderr: +./c++.at:1363: $PREPARSER ./input aaaap +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: +./glr-regression.at:1447: $PREPARSER ./glr-regr13 +./c++.at:574: $PREPARSER ./list +error: invalid expression +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffd53b7c93f->Object::Object { } -0x7ffd53b7ca00->Object::Object { 0x7ffd53b7c93f } -0x7ffd53b7c93f->Object::~Object { 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next token is token 'a' (0x7ffd53b7ca00 'a') -0x7ffd53b7c970->Object::Object { 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::Object { 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::~Object { 0x7ffd53b7c927, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x7ffd53b7c970, 0x7ffd53b7ca00 } -Shifting token 'a' (0x7ffd53b7c970 'a') -0x5634675692e0->Object::Object { 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::Object { 0x5634675692e0, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::~Object { 0x5634675692e0, 0x7ffd53b7c8ff, 0x7ffd53b7c970 } -0x7ffd53b7c970->Object::~Object { 0x5634675692e0, 0x7ffd53b7c970 } +0x7fff76085caf->Object::Object { } +0x7fff76085d70->Object::Object { 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'a' (0x7fff76085d70 'a') +0x7fff76085ce0->Object::Object { 0x7fff76085d70 } +0x7fff76085c97->Object::Object { 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085c97->Object::~Object { 0x7fff76085c97, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x7fff76085ce0, 0x7fff76085d70 } +Shifting token 'a' (0x7fff76085ce0 'a') +0x55e6d3fda2e0->Object::Object { 0x7fff76085ce0 } +0x7fff76085c6f->Object::Object { 0x55e6d3fda2e0, 0x7fff76085ce0 } +0x7fff76085c6f->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085c6f, 0x7fff76085ce0 } +0x7fff76085ce0->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085ce0 } Entering state 2 Stack now 0 2 -0x7ffd53b7ca20->Object::Object { 0x5634675692e0 } +0x7fff76085d90->Object::Object { 0x55e6d3fda2e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5634675692e0 'a') --> $$ = nterm item (0x7ffd53b7ca20 'a') -0x5634675692e0->Object::~Object { 0x5634675692e0, 0x7ffd53b7ca20 } -0x5634675692e0->Object::Object { 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::Object { 0x5634675692e0, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::~Object { 0x5634675692e0, 0x7ffd53b7c9d8, 0x7ffd53b7ca20 } -0x7ffd53b7ca20->Object::~Object { 0x5634675692e0, 0x7ffd53b7ca20 } + $1 = token 'a' (0x55e6d3fda2e0 'a') +-> $$ = nterm item (0x7fff76085d90 'a') +0x55e6d3fda2e0->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085d90 } +0x55e6d3fda2e0->Object::Object { 0x7fff76085d90 } +0x7fff76085d48->Object::Object { 0x55e6d3fda2e0, 0x7fff76085d90 } +0x7fff76085d48->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085d48, 
0x7fff76085d90 } +0x7fff76085d90->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085d90 } Entering state 11 Stack now 0 11 Reading a token -0x7ffd53b7c93f->Object::Object { 0x5634675692e0 } -0x7ffd53b7ca00->Object::Object { 0x5634675692e0, 0x7ffd53b7c93f } -0x7ffd53b7c93f->Object::~Object { 0x5634675692e0, 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next token is token 'a' (0x7ffd53b7ca00 'a') -0x7ffd53b7c970->Object::Object { 0x5634675692e0, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::Object { 0x5634675692e0, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::~Object { 0x5634675692e0, 0x7ffd53b7c927, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x5634675692e0, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -Shifting token 'a' (0x7ffd53b7c970 'a') -0x563467569300->Object::Object { 0x5634675692e0, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c8ff, 0x7ffd53b7c970 } -0x7ffd53b7c970->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970 } +0x7fff76085caf->Object::Object { 0x55e6d3fda2e0 } +0x7fff76085d70->Object::Object { 0x55e6d3fda2e0, 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'a' (0x7fff76085d70 'a') +0x7fff76085ce0->Object::Object { 0x55e6d3fda2e0, 0x7fff76085d70 } +0x7fff76085c97->Object::Object { 0x55e6d3fda2e0, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085c97->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085c97, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085ce0, 0x7fff76085d70 } +Shifting token 'a' (0x7fff76085ce0 'a') +0x55e6d3fda300->Object::Object { 0x55e6d3fda2e0, 0x7fff76085ce0 } +0x7fff76085c6f->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0 } +0x7fff76085c6f->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085c6f, 0x7fff76085ce0 } +0x7fff76085ce0->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0 } Entering state 2 Stack now 0 11 2 -0x7ffd53b7ca20->Object::Object { 0x5634675692e0, 0x563467569300 } +0x7fff76085d90->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x563467569300 'a') --> $$ = nterm item (0x7ffd53b7ca20 'a') -0x563467569300->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca20 } -0x563467569300->Object::Object { 0x5634675692e0, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c9d8, 0x7ffd53b7ca20 } -0x7ffd53b7ca20->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca20 } + $1 = token 'a' (0x55e6d3fda300 'a') +-> $$ = nterm item (0x7fff76085d90 'a') +0x55e6d3fda300->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d90 } +0x55e6d3fda300->Object::Object { 0x55e6d3fda2e0, 0x7fff76085d90 } +0x7fff76085d48->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d90 } +0x7fff76085d48->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d48, 0x7fff76085d90 } +0x7fff76085d90->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d90 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffd53b7c93f->Object::Object { 0x5634675692e0, 0x563467569300 } -0x7ffd53b7ca00->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c93f } -0x7ffd53b7c93f->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next 
token is token 'a' (0x7ffd53b7ca00 'a') -0x7ffd53b7c970->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c927, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -Shifting token 'a' (0x7ffd53b7c970 'a') -0x563467569320->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c8ff, 0x7ffd53b7c970 } -0x7ffd53b7c970->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970 } +0x7fff76085caf->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300 } +0x7fff76085d70->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'a' (0x7fff76085d70 'a') +0x7fff76085ce0->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d70 } +0x7fff76085c97->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085c97->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085c97, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0, 0x7fff76085d70 } +Shifting token 'a' (0x7fff76085ce0 'a') +0x55e6d3fda320->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085ce0 } +0x7fff76085c6f->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0 } +0x7fff76085c6f->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085c6f, 0x7fff76085ce0 } +0x7fff76085ce0->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0 } Entering state 2 Stack now 0 11 11 2 -0x7ffd53b7ca20->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320 } +0x7fff76085d90->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x563467569320 'a') --> $$ = nterm item (0x7ffd53b7ca20 'a') -0x563467569320->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca20 } -0x563467569320->Object::Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c9d8, 0x7ffd53b7ca20 } -0x7ffd53b7ca20->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca20 } + $1 = token 'a' (0x55e6d3fda320 'a') +-> $$ = nterm item (0x7fff76085d90 'a') +0x55e6d3fda320->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d90 } +0x55e6d3fda320->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d90 } +0x7fff76085d48->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d90 } +0x7fff76085d48->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d48, 0x7fff76085d90 } +0x7fff76085d90->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d90 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffd53b7c93f->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320 } 
-0x7ffd53b7ca00->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c93f } -0x7ffd53b7c93f->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next token is token 'a' (0x7ffd53b7ca00 'a') -0x7ffd53b7c970->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7c927->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c927, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970, 0x7ffd53b7ca00 } -Shifting token 'a' (0x7ffd53b7c970 'a') -0x563467569340->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c970 } -0x7ffd53b7c8ff->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c8ff, 0x7ffd53b7c970 } -0x7ffd53b7c970->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c970 } +0x7fff76085caf->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320 } +0x7fff76085d70->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'a' (0x7fff76085d70 'a') +0x7fff76085ce0->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d70 } +0x7fff76085c97->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085c97->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085c97, 0x7fff76085ce0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0, 0x7fff76085d70 } +Shifting token 'a' (0x7fff76085ce0 'a') +0x55e6d3fda340->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085ce0 } +0x7fff76085c6f->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085ce0 } +0x7fff76085c6f->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085c6f, 0x7fff76085ce0 } +0x7fff76085ce0->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085ce0 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffd53b7ca20->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340 } +0x7fff76085d90->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x563467569340 'a') --> $$ = nterm item (0x7ffd53b7ca20 'a') -0x563467569340->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7ca20 } -0x563467569340->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7ca20 } -0x7ffd53b7c9d8->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c9d8, 0x7ffd53b7ca20 } -0x7ffd53b7ca20->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7ca20 } + $1 = token 'a' (0x55e6d3fda340 'a') +-> $$ = nterm item (0x7fff76085d90 'a') 
+0x55e6d3fda340->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d90 } +0x55e6d3fda340->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d90 } +0x7fff76085d48->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d90 } +0x7fff76085d48->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d48, 0x7fff76085d90 } +0x7fff76085d90->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d90 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffd53b7c93f->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340 } -0x7ffd53b7ca00->Object::Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c93f } -0x7ffd53b7c93f->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7c93f, 0x7ffd53b7ca00 } -Next token is token 'p' (0x7ffd53b7ca00 'p'Exception caught: cleaning lookahead and stack -0x563467569340->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x563467569340, 0x7ffd53b7ca00 } -0x563467569320->Object::~Object { 0x5634675692e0, 0x563467569300, 0x563467569320, 0x7ffd53b7ca00 } -0x563467569300->Object::~Object { 0x5634675692e0, 0x563467569300, 0x7ffd53b7ca00 } -0x5634675692e0->Object::~Object { 0x5634675692e0, 0x7ffd53b7ca00 } -0x7ffd53b7ca00->Object::~Object { 0x7ffd53b7ca00 } +0x7fff76085caf->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340 } +0x7fff76085d70->Object::Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085caf } +0x7fff76085caf->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085caf, 0x7fff76085d70 } +Next token is token 'p' (0x7fff76085d70 'p'Exception caught: cleaning lookahead and stack +0x55e6d3fda340->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x55e6d3fda340, 0x7fff76085d70 } +0x55e6d3fda320->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x55e6d3fda320, 0x7fff76085d70 } +0x55e6d3fda300->Object::~Object { 0x55e6d3fda2e0, 0x55e6d3fda300, 0x7fff76085d70 } +0x55e6d3fda2e0->Object::~Object { 0x55e6d3fda2e0, 0x7fff76085d70 } +0x7fff76085d70->Object::~Object { 0x7fff76085d70 } exception caught: printer end { } -./c++.at:1362: grep '^exception caught: printer$' stderr +stderr: stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae +./glr-regression.at:1965: $PREPARSER ./glr-regr17 +./c++.at:1362: grep '^exception caught: printer$' stderr +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./c++.at:570: $here/modern -./c++.at:1362: $PREPARSER ./input aaaaE -stdout: stderr: -Modern C++: 201103 -./c++.at:570: $PREPARSER ./list -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Ambiguity detected. 
+Option 1, + start -> + ambig1 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty1 -> + +Option 2, + start -> + ambig2 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty2 -> + +1.1-2.2: syntax is ambiguous +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./glr-regression.at:1965: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Destroy: "0" Destroy: "0" Destroy: 1 @@ -265083,80 +264595,618 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./glr-regression.at:1447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +stdout: +stdout: +./glr-regression.at:2149: $PREPARSER ./input --debug +./c++.at:235: $PREPARSER ./list +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae +stderr: +./c++.at:1363: $PREPARSER ./input --debug aaaap +Starting parse +Entering state 0 +Reading a token +Next token is token 'a' () +Shifting token 'a' () +Entering state 1 +Reading a token +Next token is token 'b' () +Shifting token 'b' () +Entering state 3 +Reducing stack 0 by rule 3 (line 30): + $1 = token 'b' () +-> $$ = nterm b () +Entering state 4 +Reading a token +Next token is token 'c' () +Shifting token 'c' () +Entering state 6 +Reducing stack 0 by rule 4 (line 31): +-> $$ = nterm d () +Entering state 7 +Reading a token +Now at end of input. +Stack 0 Entering state 7 +Now at end of input. +Splitting off stack 1 from 0. +Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. +Stack 1 Entering state 2 +Now at end of input. +Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. +Merging stack 0 into stack 1. +Stack 1 Entering state 2 +Now at end of input. +Removing dead stacks. +Rename stack 1 -> 0. +On stack 0, shifting token "end of file" () +Stack 0 now in state 5 +Ambiguity detected. +Option 1, + start -> + 'a' + b + 'c' + d + +Option 2, + start -> + 'a' + b + 'c' + d + +syntax is ambiguous +Cleanup: popping token "end of file" () +Cleanup: popping unresolved nterm start () +Cleanup: popping nterm d () +Cleanup: popping token 'c' () +Cleanup: popping nterm b () +Cleanup: popping token 'a' () +755. glr-regression.at:1447: 766. 
glr-regression.at:1965: ./glr-regression.at:2149: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +stderr: + ok +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1066: $PREPARSER ./input < in stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' stderr: -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +exception caught: syntax error stderr: -stdout: -./glr-regression.at:844: $PREPARSER ./glr-regr7 +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffd01da679f->Object::Object { } +0x7ffd01da6860->Object::Object { 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'a' (0x7ffd01da6860 'a') +0x7ffd01da67d0->Object::Object { 0x7ffd01da6860 } +0x7ffd01da6787->Object::Object { 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::~Object { 0x7ffd01da6787, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x7ffd01da67d0, 0x7ffd01da6860 } +Shifting token 'a' (0x7ffd01da67d0 'a') +0x55da857332e0->Object::Object { 0x7ffd01da67d0 } +0x7ffd01da675f->Object::Object { 0x55da857332e0, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::~Object { 0x55da857332e0, 0x7ffd01da675f, 0x7ffd01da67d0 } +0x7ffd01da67d0->Object::~Object { 0x55da857332e0, 0x7ffd01da67d0 } +Entering state 1 +Stack now 0 1 +0x7ffd01da6880->Object::Object { 0x55da857332e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55da857332e0 'a') +-> $$ = nterm item (0x7ffd01da6880 'a') +0x55da857332e0->Object::~Object { 0x55da857332e0, 0x7ffd01da6880 } +0x55da857332e0->Object::Object { 0x7ffd01da6880 } +0x7ffd01da6838->Object::Object { 0x55da857332e0, 0x7ffd01da6880 } +0x7ffd01da6838->Object::~Object { 0x55da857332e0, 0x7ffd01da6838, 0x7ffd01da6880 } +0x7ffd01da6880->Object::~Object { 0x55da857332e0, 0x7ffd01da6880 } +Entering state 10 +Stack now 0 10 +Reading a token +0x7ffd01da679f->Object::Object { 0x55da857332e0 } +0x7ffd01da6860->Object::Object { 0x55da857332e0, 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x55da857332e0, 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'a' (0x7ffd01da6860 'a') +0x7ffd01da67d0->Object::Object { 0x55da857332e0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::Object { 0x55da857332e0, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::~Object { 0x55da857332e0, 0x7ffd01da6787, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x55da857332e0, 0x7ffd01da67d0, 0x7ffd01da6860 } +Shifting token 'a' (0x7ffd01da67d0 'a') +0x55da85733300->Object::Object { 0x55da857332e0, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da675f, 0x7ffd01da67d0 } +0x7ffd01da67d0->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da67d0 } +Entering state 1 +Stack now 0 10 1 +0x7ffd01da6880->Object::Object { 0x55da857332e0, 0x55da85733300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55da85733300 'a') +-> $$ = nterm item (0x7ffd01da6880 'a') 
+0x55da85733300->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6880 } +0x55da85733300->Object::Object { 0x55da857332e0, 0x7ffd01da6880 } +0x7ffd01da6838->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6880 } +0x7ffd01da6838->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6838, 0x7ffd01da6880 } +0x7ffd01da6880->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6880 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7ffd01da679f->Object::Object { 0x55da857332e0, 0x55da85733300 } +0x7ffd01da6860->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'a' (0x7ffd01da6860 'a') +0x7ffd01da67d0->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6860 } +0x7ffd01da6787->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6787, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da67d0, 0x7ffd01da6860 } +Shifting token 'a' (0x7ffd01da67d0 'a') +0x55da85733320->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da675f, 0x7ffd01da67d0 } +0x7ffd01da67d0->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0 } +Entering state 1 +Stack now 0 10 10 1 +0x7ffd01da6880->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55da85733320 'a') +-> $$ = nterm item (0x7ffd01da6880 'a') +0x55da85733320->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6880 } +0x55da85733320->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6880 } +0x7ffd01da6838->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6880 } +0x7ffd01da6838->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6838, 0x7ffd01da6880 } +0x7ffd01da6880->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6880 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x7ffd01da679f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320 } +0x7ffd01da6860->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'a' (0x7ffd01da6860 'a') +0x7ffd01da67d0->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6860 } +0x7ffd01da6787->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6787, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0, 0x7ffd01da6860 } +Shifting token 'a' (0x7ffd01da67d0 'a') +0x55da85733340->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 
0x55da85733340, 0x7ffd01da675f, 0x7ffd01da67d0 } +0x7ffd01da67d0->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da67d0 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7ffd01da6880->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55da85733340 'a') +-> $$ = nterm item (0x7ffd01da6880 'a') +0x55da85733340->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6880 } +0x55da85733340->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6880 } +0x7ffd01da6838->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6880 } +0x7ffd01da6838->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6838, 0x7ffd01da6880 } +0x7ffd01da6880->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6880 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7ffd01da679f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340 } +0x7ffd01da6860->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'p' (0x7ffd01da6860 'p'Exception caught: cleaning lookahead and stack +0x55da85733340->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6860 } +0x55da85733320->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6860 } +0x55da85733300->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6860 } +0x55da857332e0->Object::~Object { 0x55da857332e0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x7ffd01da6860 } +exception caught: printer +end { } +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +771. glr-regression.at:2149: ok stderr: stdout: -./glr-regression.at:1037: $PREPARSER ./glr-regr9 -stdout: +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaaE +error: invalid character +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -memory exhausted -stdout: -./glr-regression.at:1037: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -memory exhausted -./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -736. 
glr-regression.at:844: ok +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffd01da679f->Object::Object { } +0x7ffd01da6860->Object::Object { 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'a' (0x7ffd01da6860 'a') +0x7ffd01da67d0->Object::Object { 0x7ffd01da6860 } +0x7ffd01da6787->Object::Object { 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::~Object { 0x7ffd01da6787, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x7ffd01da67d0, 0x7ffd01da6860 } +Shifting token 'a' (0x7ffd01da67d0 'a') +0x55da857332e0->Object::Object { 0x7ffd01da67d0 } +0x7ffd01da675f->Object::Object { 0x55da857332e0, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::~Object { 0x55da857332e0, 0x7ffd01da675f, 0x7ffd01da67d0 } +0x7ffd01da67d0->Object::~Object { 0x55da857332e0, 0x7ffd01da67d0 } +Entering state 1 +Stack now 0 1 +0x7ffd01da6880->Object::Object { 0x55da857332e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55da857332e0 'a') +-> $$ = nterm item (0x7ffd01da6880 'a') +0x55da857332e0->Object::~Object { 0x55da857332e0, 0x7ffd01da6880 } +0x55da857332e0->Object::Object { 0x7ffd01da6880 } +0x7ffd01da6838->Object::Object { 0x55da857332e0, 0x7ffd01da6880 } +0x7ffd01da6838->Object::~Object { 0x55da857332e0, 0x7ffd01da6838, 0x7ffd01da6880 } +0x7ffd01da6880->Object::~Object { 0x55da857332e0, 0x7ffd01da6880 } +Entering state 10 +Stack now 0 10 +Reading a token +0x7ffd01da679f->Object::Object { 0x55da857332e0 } +0x7ffd01da6860->Object::Object { 0x55da857332e0, 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x55da857332e0, 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'a' (0x7ffd01da6860 'a') +0x7ffd01da67d0->Object::Object { 0x55da857332e0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::Object { 0x55da857332e0, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::~Object { 0x55da857332e0, 0x7ffd01da6787, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x55da857332e0, 0x7ffd01da67d0, 0x7ffd01da6860 } +Shifting token 'a' (0x7ffd01da67d0 'a') +0x55da85733300->Object::Object { 0x55da857332e0, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da675f, 0x7ffd01da67d0 } +0x7ffd01da67d0->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da67d0 } +Entering state 1 +Stack now 0 10 1 +0x7ffd01da6880->Object::Object { 0x55da857332e0, 0x55da85733300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55da85733300 'a') +-> $$ = nterm item (0x7ffd01da6880 'a') +0x55da85733300->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6880 } +0x55da85733300->Object::Object { 0x55da857332e0, 0x7ffd01da6880 } +0x7ffd01da6838->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6880 } +0x7ffd01da6838->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6838, 0x7ffd01da6880 } +0x7ffd01da6880->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6880 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7ffd01da679f->Object::Object { 0x55da857332e0, 0x55da85733300 } +0x7ffd01da6860->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'a' (0x7ffd01da6860 'a') +0x7ffd01da67d0->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6860 } +0x7ffd01da6787->Object::Object { 0x55da857332e0, 
0x55da85733300, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6787, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da67d0, 0x7ffd01da6860 } +Shifting token 'a' (0x7ffd01da67d0 'a') +0x55da85733320->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da675f, 0x7ffd01da67d0 } +0x7ffd01da67d0->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0 } +Entering state 1 +Stack now 0 10 10 1 +0x7ffd01da6880->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55da85733320 'a') +-> $$ = nterm item (0x7ffd01da6880 'a') +0x55da85733320->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6880 } +0x55da85733320->Object::Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6880 } +0x7ffd01da6838->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6880 } +0x7ffd01da6838->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6838, 0x7ffd01da6880 } +0x7ffd01da6880->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6880 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x7ffd01da679f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320 } +0x7ffd01da6860->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'a' (0x7ffd01da6860 'a') +0x7ffd01da67d0->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6860 } +0x7ffd01da6787->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6787->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6787, 0x7ffd01da67d0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0, 0x7ffd01da6860 } +Shifting token 'a' (0x7ffd01da67d0 'a') +0x55da85733340->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da67d0 } +0x7ffd01da675f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da675f, 0x7ffd01da67d0 } +0x7ffd01da67d0->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da67d0 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7ffd01da6880->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55da85733340 'a') +-> $$ = nterm item (0x7ffd01da6880 'a') +0x55da85733340->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6880 } +0x55da85733340->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6880 } +0x7ffd01da6838->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6880 } +0x7ffd01da6838->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6838, 0x7ffd01da6880 } 
+0x7ffd01da6880->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6880 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7ffd01da679f->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340 } +0x7ffd01da6860->Object::Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da679f } +0x7ffd01da679f->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da679f, 0x7ffd01da6860 } +Next token is token 'p' (0x7ffd01da6860 'p'Exception caught: cleaning lookahead and stack +0x55da85733340->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x55da85733340, 0x7ffd01da6860 } +0x55da85733320->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x55da85733320, 0x7ffd01da6860 } +0x55da85733300->Object::~Object { 0x55da857332e0, 0x55da85733300, 0x7ffd01da6860 } +0x55da857332e0->Object::~Object { 0x55da857332e0, 0x7ffd01da6860 } +0x7ffd01da6860->Object::~Object { 0x7ffd01da6860 } +exception caught: printer +end { } +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: grep '^exception caught: printer$' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -742. glr-regression.at:1037: ok -./glr-regression.at:1174: $PREPARSER ./glr-regr11 +./glr-regression.at:2229: $PREPARSER ./input Nwin +stdout: +======== Testing with C++ standard flags: '' +./glr-regression.at:2150: $PREPARSER ./input --debug +stdout: +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token 'a' () +Shifting token 'a' () +Entering state 1 +Reading a token +Next token is token 'b' () +Shifting token 'b' () +Entering state 3 +Reducing stack 0 by rule 3 (line 30): + $1 = token 'b' () +-> $$ = nterm b () +Entering state 4 +Reading a token +Next token is token 'c' () +Shifting token 'c' () +Entering state 6 +Reducing stack 0 by rule 4 (line 31): +-> $$ = nterm d () +Entering state 7 +Reading a token +Now at end of input. +Stack 0 Entering state 7 +Now at end of input. +Splitting off stack 1 from 0. +Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. +Stack 1 Entering state 2 +Now at end of input. +Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. +Merging stack 0 into stack 1. +Stack 1 Entering state 2 +Now at end of input. +Removing dead stacks. +Rename stack 1 -> 0. +On stack 0, shifting token "end of file" () +Stack 0 now in state 5 +Ambiguity detected. 
+Option 1, + start -> + 'a' + b + 'c' + d + +Option 2, + start -> + 'a' + b + 'c' + d +syntax is ambiguous +Cleanup: popping token "end of file" () +Cleanup: popping unresolved nterm start () +Cleanup: popping nterm d () +Cleanup: popping token 'c' () +Cleanup: popping nterm b () +Cleanup: popping token 'a' () +exception caught: syntax error +./glr-regression.at:2150: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./glr-regression.at:1174: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./glr-regression.at:1680: $PREPARSER ./glr-regr14 +stderr: +stdout: +./glr-regression.at:1787: $PREPARSER ./glr-regr15 +stderr: +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Owin +./c++.at:1362: $PREPARSER ./input aaaaR +Ambiguity detected. +Option 1, + ambiguity -> + ambiguity1 -> -747. glr-regression.at:1174: ok +Option 2, + ambiguity -> + ambiguity2 -> -750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ... -./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y -751. glr-regression.at:1311: testing Leaked semantic values if user action cuts parse: glr.cc ... -./glr-regression.at:1311: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y -752. glr-regression.at:1312: testing Leaked semantic values if user action cuts parse: glr2.cc ... -./glr-regression.at:1312: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y +syntax is ambiguous +./glr-regression.at:1787: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +772. glr-regression.at:2150: ok +stderr: stderr: +syntax error, unexpected 'n', expecting 'o' +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +761. glr-regression.at:1787: ok +758. 
glr-regression.at:1680: stderr: +stderr: +./c++.at:1363: $PREPARSER ./input aaaaE +stderr: + ok stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stdout: +======== Testing with C++ standard flags: '' +stdout: +./glr-regression.at:2230: $PREPARSER ./input Nwin +./glr-regression.at:2229: $PREPARSER ./input Owio +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stderr: +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./glr-regression.at:1311: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS -./glr-regression.at:1310: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr12 glr-regr12.c $LIBS +./c++.at:92: $PREPARSER ./input +./c++.at:1363: $PREPARSER ./input aaaaT stderr: stdout: -./glr-regression.at:356: $PREPARSER ./glr-regr2a input1.txt -./glr-regression.at:1312: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS +./c++.at:566: $here/modern stderr: -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:356: $PREPARSER ./glr-regr2a input2.txt +stdout: +./c++.at:1066: ./check stderr: -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:356: $PREPARSER ./glr-regr2a input3.txt +stdout: +689. c++.at:1371: ok +stdout: +./glr-regression.at:2230: $PREPARSER ./input Owin stderr: -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -719. 
glr-regression.at:356: ok - +./glr-regression.at:2229: $PREPARSER ./input Nwio +stderr: +Modern C++: 201103 +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: $PREPARSER ./list +stderr: +stderr: +stderr: +stdout: +stderr: +syntax error, unexpected 'n', expecting 'o' +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +syntax error, unexpected 'o', expecting 'n' +./c++.at:1363: $PREPARSER ./input aaaaR +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./glr-regression.at:2230: $PREPARSER ./input Owio +774. glr-regression.at:2229: ok +stderr: +stdout: +./glr-regression.at:1862: $PREPARSER ./glr-regr16 +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stderr: +syntax error +./glr-regression.at:1862: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2230: $PREPARSER ./input Nwio +764. glr-regression.at:1862: ok stderr: stdout: ./c++.at:567: $here/modern +stderr: +stdout: +stderr: +stdout: +./c++.at:568: $here/modern +./c++.at:1064: $PREPARSER ./input < in +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: stdout: +stderr: +Modern C++: 201103 +./c++.at:568: $PREPARSER ./list +syntax error, unexpected 'o', expecting 'n' +./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Modern C++: 201103 ./c++.at:567: $PREPARSER ./list +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) stderr: Destroy: "0" Destroy: "0" @@ -265179,65 +265229,131 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +775. 
glr-regression.at:2230: ok stderr: stdout: +./c++.at:570: $here/modern +./c++.at:1064: $PREPARSER ./input < in ./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:490: $PREPARSER ./glr-regr3 input.txt -753. glr-regression.at:1445: testing Incorrect lookahead during deterministic GLR: glr.c ... -./glr-regression.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.c glr-regr13.y -stderr: ======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -722. glr-regression.at:490: ok +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS - -./glr-regression.at:1445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr13 glr-regr13.c $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: +Modern C++: 201103 +./c++.at:570: $PREPARSER ./list +stdout: stdout: -./c++.at:856: $PREPARSER ./input stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +error: invalid expression +./c++.at:849: $PREPARSER ./input +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: stderr: -./glr-regression.at:672: $PREPARSER ./glr-regr5 stdout: -./c++.at:1065: $PREPARSER ./input < in +stdout: +stdout: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2151: $PREPARSER ./input --debug +./c++.at:1064: $PREPARSER ./input < in +./glr-regression.at:1966: $PREPARSER ./glr-regr17 +./c++.at:659: $PREPARSER ./input +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: +./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: -754. glr-regression.at:1446: testing Incorrect lookahead during deterministic GLR: glr.cc ... 
-./glr-regression.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token 'a' () +Shifting token 'a' () +Entering state 1 +Reading a token +Next token is token 'b' () +Shifting token 'b' () +Entering state 3 +Reducing stack 0 by rule 3 (line 30): + $1 = token 'b' () +-> $$ = nterm b () +Entering state 4 +Reading a token +Next token is token 'c' () +Shifting token 'c' () +Entering state 6 +Reducing stack 0 by rule 4 (line 31): +-> $$ = nterm d () +Entering state 7 +Reading a token +Now at end of input. +Stack 0 Entering state 7 +Now at end of input. +Splitting off stack 1 from 0. +Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. +Stack 1 Entering state 2 +Now at end of input. +Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. +Merging stack 0 into stack 1. +Stack 1 Entering state 2 +Now at end of input. +Removing dead stacks. +Rename stack 1 -> 0. +On stack 0, shifting token "end of file" () +Stack 0 now in state 5 Ambiguity detected. Option 1, - start -> + start -> 'a' + b + 'c' + d Option 2, - start -> + start -> 'a' + b + 'c' + d syntax is ambiguous -./glr-regression.at:672: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -stderr: -stdout: -error: invalid expression -731. glr-regression.at:672: ./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:659: $PREPARSER ./input - ok -stderr: +Cleanup: popping token "end of file" () +Cleanup: popping unresolved nterm start () +Cleanup: popping nterm d () +Cleanup: popping token 'c' () +Cleanup: popping nterm b () +Cleanup: popping token 'a' () Starting parse Entering state 0 Stack now 0 @@ -265282,63 +265398,433 @@ Cleanup: popping token EOI () Cleanup: popping nterm expr (40) destroy: 40 +./glr-regression.at:2151: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Ambiguity detected. +Option 1, + start -> + ambig1 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty1 -> + +Option 2, + start -> + ambig2 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty2 -> + +1.1-2.2: syntax is ambiguous +./glr-regression.at:1966: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +773. glr-regression.at:2151: ok +767. glr-regression.at:1966: ok +======== Testing with C++ standard flags: '' +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +stderr: +stdout: +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:235: $PREPARSER ./list +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stdout: +stdout: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $PREPARSER ./input aaaas +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1361: $PREPARSER ./input aaaas +======== Testing with C++ standard flags: '' +stderr: +exception caught: reduction +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal +./c++.at:1361: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +exception caught: yylex +./c++.at:855: $PREPARSER ./input +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1360: $PREPARSER ./input i +stderr: +./c++.at:1361: $PREPARSER ./input i +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: initial-action +./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap +stderr: +./c++.at:1361: $PREPARSER ./input aaaap +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1360: $PREPARSER ./input --debug aaaap +./c++.at:1555: $PREPARSER ./test +stderr: +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5589f618cf40->Object::Object { } +Next token is token 'a' (0x5589f618cf40 'a') +Shifting token 'a' (0x5589f618cf40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5589f618cf40 'a') +-> $$ = nterm item (0x5589f618cf40 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x5589f618cf90->Object::Object { 0x5589f618cf40 } +Next token is token 'a' (0x5589f618cf90 'a') +Shifting token 'a' (0x5589f618cf90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5589f618cf90 'a') +-> $$ = nterm item (0x5589f618cf90 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x5589f618cfe0->Object::Object { 0x5589f618cf40, 0x5589f618cf90 } +Next token is token 'a' (0x5589f618cfe0 'a') +Shifting token 'a' (0x5589f618cfe0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5589f618cfe0 
'a') +-> $$ = nterm item (0x5589f618cfe0 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x5589f618d030->Object::Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0 } +Next token is token 'a' (0x5589f618d030 'a') +Shifting token 'a' (0x5589f618d030 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5589f618d030 'a') +-> $$ = nterm item (0x5589f618d030 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x5589f618d080->Object::Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0, 0x5589f618d030 } +Next token is token 'p' (0x5589f618d080 'p'Exception caught: cleaning lookahead and stack +0x5589f618d080->Object::~Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0, 0x5589f618d030, 0x5589f618d080 } +0x5589f618d030->Object::~Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0, 0x5589f618d030 } +0x5589f618cfe0->Object::~Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0 } +0x5589f618cf90->Object::~Object { 0x5589f618cf40, 0x5589f618cf90 } +0x5589f618cf40->Object::~Object { 0x5589f618cf40 } +exception caught: printer +end { } +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x558e490faf40->Object::Object { } +Next token is token 'a' (0x558e490faf40 'a') +Shifting token 'a' (0x558e490faf40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558e490faf40 'a') +-> $$ = nterm item (0x558e490faf40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x558e490faf90->Object::Object { 0x558e490faf40 } +Next token is token 'a' (0x558e490faf90 'a') +Shifting token 'a' (0x558e490faf90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558e490faf90 'a') +-> $$ = nterm item (0x558e490faf90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x558e490fafe0->Object::Object { 0x558e490faf40, 0x558e490faf90 } +Next token is token 'a' (0x558e490fafe0 'a') +Shifting token 'a' (0x558e490fafe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558e490fafe0 'a') +-> $$ = nterm item (0x558e490fafe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x558e490fb030->Object::Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0 } +Next token is token 'a' (0x558e490fb030 'a') +Shifting token 'a' (0x558e490fb030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558e490fb030 'a') +-> $$ = nterm item (0x558e490fb030 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x558e490fb080->Object::Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0, 0x558e490fb030 } +Next token is token 'p' (0x558e490fb080 'p'Exception caught: cleaning lookahead and stack +0x558e490fb080->Object::~Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0, 0x558e490fb030, 0x558e490fb080 } +0x558e490fb030->Object::~Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0, 0x558e490fb030 } +0x558e490fafe0->Object::~Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0 } +0x558e490faf90->Object::~Object { 0x558e490faf40, 0x558e490faf90 } +0x558e490faf40->Object::~Object { 0x558e490faf40 } +exception caught: printer +end { } +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +691. 
c++.at:1517: ok +stderr: +======== Testing with C++ standard flags: '' +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x558e490faf40->Object::Object { } +Next token is token 'a' (0x558e490faf40 'a') +Shifting token 'a' (0x558e490faf40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558e490faf40 'a') +-> $$ = nterm item (0x558e490faf40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x558e490faf90->Object::Object { 0x558e490faf40 } +Next token is token 'a' (0x558e490faf90 'a') +Shifting token 'a' (0x558e490faf90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558e490faf90 'a') +-> $$ = nterm item (0x558e490faf90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x558e490fafe0->Object::Object { 0x558e490faf40, 0x558e490faf90 } +Next token is token 'a' (0x558e490fafe0 'a') +Shifting token 'a' (0x558e490fafe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558e490fafe0 'a') +-> $$ = nterm item (0x558e490fafe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x558e490fb030->Object::Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0 } +Next token is token 'a' (0x558e490fb030 'a') +Shifting token 'a' (0x558e490fb030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x558e490fb030 'a') +-> $$ = nterm item (0x558e490fb030 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x558e490fb080->Object::Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0, 0x558e490fb030 } +Next token is token 'p' (0x558e490fb080 'p'Exception caught: cleaning lookahead and stack +0x558e490fb080->Object::~Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0, 0x558e490fb030, 0x558e490fb080 } +0x558e490fb030->Object::~Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0, 0x558e490fb030 } +0x558e490fafe0->Object::~Object { 0x558e490faf40, 0x558e490faf90, 0x558e490fafe0 } +0x558e490faf90->Object::~Object { 0x558e490faf40, 0x558e490faf90 } +0x558e490faf40->Object::~Object { 0x558e490faf40 } +exception caught: printer +end { } +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5589f618cf40->Object::Object { } +Next token is token 'a' (0x5589f618cf40 'a') +Shifting token 'a' (0x5589f618cf40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5589f618cf40 'a') +-> $$ = nterm item (0x5589f618cf40 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x5589f618cf90->Object::Object { 0x5589f618cf40 } +Next token is token 'a' (0x5589f618cf90 'a') +Shifting token 'a' (0x5589f618cf90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5589f618cf90 'a') +-> $$ = nterm item (0x5589f618cf90 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x5589f618cfe0->Object::Object { 0x5589f618cf40, 0x5589f618cf90 } +Next token is token 'a' (0x5589f618cfe0 'a') +Shifting token 'a' (0x5589f618cfe0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5589f618cfe0 'a') +-> $$ = nterm item (0x5589f618cfe0 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x5589f618d030->Object::Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0 } +Next token is token 'a' (0x5589f618d030 'a') +Shifting token 'a' (0x5589f618d030 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by 
rule 4 (line 147): + $1 = token 'a' (0x5589f618d030 'a') +-> $$ = nterm item (0x5589f618d030 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x5589f618d080->Object::Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0, 0x5589f618d030 } +Next token is token 'p' (0x5589f618d080 'p'Exception caught: cleaning lookahead and stack +0x5589f618d080->Object::~Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0, 0x5589f618d030, 0x5589f618d080 } +0x5589f618d030->Object::~Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0, 0x5589f618d030 } +0x5589f618cfe0->Object::~Object { 0x5589f618cf40, 0x5589f618cf90, 0x5589f618cfe0 } +0x5589f618cf90->Object::~Object { 0x5589f618cf40, 0x5589f618cf90 } +0x5589f618cf40->Object::~Object { 0x5589f618cf40 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr +./c++.at:1361: grep '^exception caught: printer$' stderr +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +stderr: +exception caught: printer +stdout: +./c++.at:1360: $PREPARSER ./input aaaae +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae +stdout: +stderr: +exception caught: syntax error +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaE +./c++.at:1361: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1360: $PREPARSER ./input aaaaT ./c++.at:1065: $PREPARSER ./input < in -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - +./c++.at:1361: $PREPARSER ./input aaaaT +stderr: +stderr: +stdout: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./glr-regression.at:2231: $PREPARSER ./input Nwin +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1361: $PREPARSER ./input aaaaR stderr: +error: invalid expression +caught error error: invalid character +caught error ./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./glr-regression.at:2231: $PREPARSER ./input Owin +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1065: $PREPARSER ./input < in ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./glr-regression.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS -./c++.at:659: $CXX 
$CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -755. glr-regression.at:1447: testing Incorrect lookahead during deterministic GLR: glr2.cc ... -./glr-regression.at:1447: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y stderr: -stdout: -./glr-regression.at:945: $PREPARSER ./glr-regr8 +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +syntax error, unexpected 'n', expecting 'o' +./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./glr-regression.at:945: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2231: $PREPARSER ./input Owio stderr: -./glr-regression.at:1447: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS -stdout: -./c++.at:571: $here/modern -739. glr-regression.at:945: ok +./c++.at:1065: $PREPARSER ./input < in +./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2231: $PREPARSER ./input Nwio +stderr: +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +syntax error, unexpected 'o', expecting 'n' +./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +776. glr-regression.at:2231: ok stderr: stdout: +./c++.at:569: $here/modern stdout: Modern C++: 201103 -./c++.at:571: $PREPARSER ./list -./c++.at:572: $here/modern -stdout: -Modern C++: 201103 -./c++.at:572: $PREPARSER ./list - -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -265361,72 +265847,247 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./glr-regression.at:1103: $PREPARSER ./glr-regr10 -756. glr-regression.at:1678: testing Incorrect lookahead during nondeterministic GLR: glr.c ... 
-./glr-regression.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.c glr-regr14.y -stderr: -./glr-regression.at:1103: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -745. glr-regression.at:1103: ok +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:566: $here/modern -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -Modern C++: 201402 - -./c++.at:566: $PREPARSER ./list +./c++.at:1362: $PREPARSER ./input aaaas stderr: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaap +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffc0515e877->Object::Object { } +0x7ffc0515e910->Object::Object { 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'a' (0x7ffc0515e910 'a') +0x7ffc0515e850->Object::Object { 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x7ffc0515e850, 0x7ffc0515e910 } +Shifting token 'a' (0x7ffc0515e850 'a') +0x55a76d29a2e0->Object::Object { 0x7ffc0515e850 } +0x7ffc0515e850->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e850 } +Entering state 2 +Stack now 0 2 +0x7ffc0515e930->Object::Object { 0x55a76d29a2e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55a76d29a2e0 'a') +-> $$ = nterm item (0x7ffc0515e930 'a') +0x55a76d29a2e0->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e930 } +0x55a76d29a2e0->Object::Object { 0x7ffc0515e930 } +0x7ffc0515e930->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e930 } +Entering state 11 +Stack now 0 11 +Reading a token +0x7ffc0515e877->Object::Object { 0x55a76d29a2e0 } +0x7ffc0515e910->Object::Object { 0x55a76d29a2e0, 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'a' (0x7ffc0515e910 'a') +0x7ffc0515e850->Object::Object { 0x55a76d29a2e0, 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e850, 0x7ffc0515e910 } +Shifting token 'a' (0x7ffc0515e850 'a') +0x55a76d29a300->Object::Object { 0x55a76d29a2e0, 0x7ffc0515e850 } +0x7ffc0515e850->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e850 } +Entering state 2 +Stack now 0 11 2 +0x7ffc0515e930->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55a76d29a300 'a') +-> $$ = nterm item (0x7ffc0515e930 'a') +0x55a76d29a300->Object::~Object { 
0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e930 } +0x55a76d29a300->Object::Object { 0x55a76d29a2e0, 0x7ffc0515e930 } +0x7ffc0515e930->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e930 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0x7ffc0515e877->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300 } +0x7ffc0515e910->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'a' (0x7ffc0515e910 'a') +0x7ffc0515e850->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e850, 0x7ffc0515e910 } +Shifting token 'a' (0x7ffc0515e850 'a') +0x55a76d29a320->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e850 } +0x7ffc0515e850->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e850 } +Entering state 2 +Stack now 0 11 11 2 +0x7ffc0515e930->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55a76d29a320 'a') +-> $$ = nterm item (0x7ffc0515e930 'a') +0x55a76d29a320->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e930 } +0x55a76d29a320->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e930 } +0x7ffc0515e930->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e930 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x7ffc0515e877->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320 } +0x7ffc0515e910->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'a' (0x7ffc0515e910 'a') +0x7ffc0515e850->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e850, 0x7ffc0515e910 } +Shifting token 'a' (0x7ffc0515e850 'a') +0x55a76d29a340->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e850 } +0x7ffc0515e850->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e850 } +Entering state 2 +Stack now 0 11 11 11 2 +0x7ffc0515e930->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55a76d29a340 'a') +-> $$ = nterm item (0x7ffc0515e930 'a') +0x55a76d29a340->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e930 } +0x55a76d29a340->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e930 } +0x7ffc0515e930->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e930 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x7ffc0515e877->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340 } +0x7ffc0515e910->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'p' (0x7ffc0515e910 'p'Exception caught: cleaning lookahead and stack +0x55a76d29a340->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e910 
} +0x55a76d29a320->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e910 } +0x55a76d29a300->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e910 } +0x55a76d29a2e0->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x7ffc0515e910 } +exception caught: printer +end { } +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffc0515e877->Object::Object { } +0x7ffc0515e910->Object::Object { 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'a' (0x7ffc0515e910 'a') +0x7ffc0515e850->Object::Object { 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x7ffc0515e850, 0x7ffc0515e910 } +Shifting token 'a' (0x7ffc0515e850 'a') +0x55a76d29a2e0->Object::Object { 0x7ffc0515e850 } +0x7ffc0515e850->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e850 } +Entering state 2 +Stack now 0 2 +0x7ffc0515e930->Object::Object { 0x55a76d29a2e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55a76d29a2e0 'a') +-> $$ = nterm item (0x7ffc0515e930 'a') +0x55a76d29a2e0->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e930 } +0x55a76d29a2e0->Object::Object { 0x7ffc0515e930 } +0x7ffc0515e930->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e930 } +Entering state 11 +Stack now 0 11 +Reading a token +0x7ffc0515e877->Object::Object { 0x55a76d29a2e0 } +0x7ffc0515e910->Object::Object { 0x55a76d29a2e0, 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'a' (0x7ffc0515e910 'a') +0x7ffc0515e850->Object::Object { 0x55a76d29a2e0, 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e850, 0x7ffc0515e910 } +Shifting token 'a' (0x7ffc0515e850 'a') +0x55a76d29a300->Object::Object { 0x55a76d29a2e0, 0x7ffc0515e850 } +0x7ffc0515e850->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e850 } +Entering state 2 +Stack now 0 11 2 +0x7ffc0515e930->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55a76d29a300 'a') +-> $$ = nterm item (0x7ffc0515e930 'a') +0x55a76d29a300->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e930 } +0x55a76d29a300->Object::Object { 0x55a76d29a2e0, 0x7ffc0515e930 } +0x7ffc0515e930->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e930 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0x7ffc0515e877->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300 } +0x7ffc0515e910->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'a' (0x7ffc0515e910 'a') +0x7ffc0515e850->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e850, 0x7ffc0515e910 } +Shifting token 'a' (0x7ffc0515e850 'a') +0x55a76d29a320->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e850 } +0x7ffc0515e850->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e850 } +Entering state 2 +Stack now 0 11 11 2 +0x7ffc0515e930->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55a76d29a320 'a') +-> $$ = nterm item (0x7ffc0515e930 'a') +0x55a76d29a320->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 
0x7ffc0515e930 } +0x55a76d29a320->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e930 } +0x7ffc0515e930->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e930 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x7ffc0515e877->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320 } +0x7ffc0515e910->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'a' (0x7ffc0515e910 'a') +0x7ffc0515e850->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e850, 0x7ffc0515e910 } +Shifting token 'a' (0x7ffc0515e850 'a') +0x55a76d29a340->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e850 } +0x7ffc0515e850->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e850 } +Entering state 2 +Stack now 0 11 11 11 2 +0x7ffc0515e930->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55a76d29a340 'a') +-> $$ = nterm item (0x7ffc0515e930 'a') +0x55a76d29a340->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e930 } +0x55a76d29a340->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e930 } +0x7ffc0515e930->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e930 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x7ffc0515e877->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340 } +0x7ffc0515e910->Object::Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e877 } +0x7ffc0515e877->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e877, 0x7ffc0515e910 } +Next token is token 'p' (0x7ffc0515e910 'p'Exception caught: cleaning lookahead and stack +0x55a76d29a340->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x55a76d29a340, 0x7ffc0515e910 } +0x55a76d29a320->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x55a76d29a320, 0x7ffc0515e910 } +0x55a76d29a300->Object::~Object { 0x55a76d29a2e0, 0x55a76d29a300, 0x7ffc0515e910 } +0x55a76d29a2e0->Object::~Object { 0x55a76d29a2e0, 0x7ffc0515e910 } +0x7ffc0515e910->Object::~Object { 0x7ffc0515e910 } +exception caught: printer +end { } +./c++.at:1362: grep '^exception caught: printer$' stderr stdout: -./c++.at:849: $PREPARSER ./input +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae stderr: -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -757. glr-regression.at:1679: testing Incorrect lookahead during nondeterministic GLR: glr.cc ... 
-./glr-regression.at:1679: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y -./glr-regression.at:1678: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr14 glr-regr14.c $LIBS -./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./glr-regression.at:1679: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS stdout: -./c++.at:568: $here/modern -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +exception caught: syntax error +./c++.at:574: $here/modern +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: Modern C++: 201103 -./c++.at:568: $PREPARSER ./list +./c++.at:574: $PREPARSER ./list +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: stderr: Destroy: "" Destroy: "" @@ -265449,18 +266110,24 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./c++.at:1362: $PREPARSER ./input aaaaR ./c++.at:573: $here/modern stderr: -stdout: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: Modern C++: 201103 -./c++.at:235: $PREPARSER ./list ./c++.at:573: $PREPARSER ./list +======== Testing with C++ standard flags: '' stderr: Destroy: "0" Destroy: "0" @@ -265483,49 +266150,50 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stderr: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./glr-regression.at:1310: $PREPARSER ./glr-regr12 -stderr: -./glr-regression.at:1310: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -750. glr-regression.at:1310: ok - -stderr: +./c++.at:571: $here/modern stdout: -./c++.at:856: $PREPARSER ./input +Modern C++: 201103 +./c++.at:571: $PREPARSER ./list stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -758. glr-regression.at:1680: testing Incorrect lookahead during nondeterministic GLR: glr2.cc ... -./glr-regression.at:1680: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1363: $PREPARSER ./input aaaas +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -stderr: -./c++.at:569: $here/modern -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $here/modern stdout: Modern C++: 201103 -./c++.at:1363: $PREPARSER ./input aaaal -./c++.at:569: $PREPARSER ./list -stderr: +./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -265548,124 +266216,133 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal +stderr: exception caught: yylex ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input i -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: exception caught: initial-action ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: ./c++.at:1363: $PREPARSER ./input aaaap -./glr-regression.at:1175: $PREPARSER ./glr-regr11 -./glr-regression.at:1680: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 
glr-regr14.cc $LIBS stderr: -stderr: -./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: -748. glr-regression.at:1175: ok Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff188d88cf->Object::Object { } -0x7fff188d89b0->Object::Object { 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'a' (0x7fff188d89b0 'a') -0x7fff188d88f0->Object::Object { 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x7fff188d88f0, 0x7fff188d89b0 } -Shifting token 'a' (0x7fff188d88f0 'a') -0x55c6363e82e0->Object::Object { 0x7fff188d88f0 } -0x7fff188d88f0->Object::~Object { 0x55c6363e82e0, 0x7fff188d88f0 } +0x7ffcab66627f->Object::Object { } +0x7ffcab666360->Object::Object { 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'a' (0x7ffcab666360 'a') +0x7ffcab6662a0->Object::Object { 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x7ffcab6662a0, 0x7ffcab666360 } +Shifting token 'a' (0x7ffcab6662a0 'a') +0x5631526d52e0->Object::Object { 0x7ffcab6662a0 } +0x7ffcab6662a0->Object::~Object { 0x5631526d52e0, 0x7ffcab6662a0 } Entering state 1 Stack now 0 1 -0x7fff188d89d0->Object::Object { 0x55c6363e82e0 } +0x7ffcab666380->Object::Object { 0x5631526d52e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c6363e82e0 'a') --> $$ = nterm item (0x7fff188d89d0 'a') -0x55c6363e82e0->Object::~Object { 0x55c6363e82e0, 0x7fff188d89d0 } -0x55c6363e82e0->Object::Object { 0x7fff188d89d0 } -0x7fff188d89d0->Object::~Object { 0x55c6363e82e0, 0x7fff188d89d0 } + $1 = token 'a' (0x5631526d52e0 'a') +-> $$ = nterm item (0x7ffcab666380 'a') +0x5631526d52e0->Object::~Object { 0x5631526d52e0, 0x7ffcab666380 } +0x5631526d52e0->Object::Object { 0x7ffcab666380 } +0x7ffcab666380->Object::~Object { 0x5631526d52e0, 0x7ffcab666380 } Entering state 10 Stack now 0 10 Reading a token -0x7fff188d88cf->Object::Object { 0x55c6363e82e0 } -0x7fff188d89b0->Object::Object { 0x55c6363e82e0, 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x55c6363e82e0, 0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'a' (0x7fff188d89b0 'a') -0x7fff188d88f0->Object::Object { 0x55c6363e82e0, 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x55c6363e82e0, 0x7fff188d88f0, 0x7fff188d89b0 } -Shifting token 'a' (0x7fff188d88f0 'a') -0x55c6363e8300->Object::Object { 0x55c6363e82e0, 0x7fff188d88f0 } -0x7fff188d88f0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88f0 } +0x7ffcab66627f->Object::Object { 0x5631526d52e0 } +0x7ffcab666360->Object::Object { 0x5631526d52e0, 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x5631526d52e0, 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'a' (0x7ffcab666360 'a') +0x7ffcab6662a0->Object::Object { 0x5631526d52e0, 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x5631526d52e0, 0x7ffcab6662a0, 0x7ffcab666360 } +Shifting token 'a' (0x7ffcab6662a0 'a') +0x5631526d5300->Object::Object { 0x5631526d52e0, 0x7ffcab6662a0 } +0x7ffcab6662a0->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab6662a0 } Entering state 1 Stack now 0 10 1 -0x7fff188d89d0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300 } +0x7ffcab666380->Object::Object { 0x5631526d52e0, 0x5631526d5300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c6363e8300 'a') --> $$ = nterm item (0x7fff188d89d0 'a') 
-0x55c6363e8300->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89d0 } -0x55c6363e8300->Object::Object { 0x55c6363e82e0, 0x7fff188d89d0 } -0x7fff188d89d0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89d0 } + $1 = token 'a' (0x5631526d5300 'a') +-> $$ = nterm item (0x7ffcab666380 'a') +0x5631526d5300->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666380 } +0x5631526d5300->Object::Object { 0x5631526d52e0, 0x7ffcab666380 } +0x7ffcab666380->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666380 } Entering state 10 Stack now 0 10 10 Reading a token -0x7fff188d88cf->Object::Object { 0x55c6363e82e0, 0x55c6363e8300 } -0x7fff188d89b0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'a' (0x7fff188d89b0 'a') -0x7fff188d88f0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88f0, 0x7fff188d89b0 } -Shifting token 'a' (0x7fff188d88f0 'a') -0x55c6363e8320->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88f0 } -0x7fff188d88f0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d88f0 } +0x7ffcab66627f->Object::Object { 0x5631526d52e0, 0x5631526d5300 } +0x7ffcab666360->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'a' (0x7ffcab666360 'a') +0x7ffcab6662a0->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab6662a0, 0x7ffcab666360 } +Shifting token 'a' (0x7ffcab6662a0 'a') +0x5631526d5320->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab6662a0 } +0x7ffcab6662a0->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab6662a0 } Entering state 1 Stack now 0 10 10 1 -0x7fff188d89d0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320 } +0x7ffcab666380->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c6363e8320 'a') --> $$ = nterm item (0x7fff188d89d0 'a') -0x55c6363e8320->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89d0 } -0x55c6363e8320->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89d0 } -0x7fff188d89d0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89d0 } + $1 = token 'a' (0x5631526d5320 'a') +-> $$ = nterm item (0x7ffcab666380 'a') +0x5631526d5320->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666380 } +0x5631526d5320->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666380 } +0x7ffcab666380->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666380 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff188d88cf->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320 } -0x7fff188d89b0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'a' (0x7fff188d89b0 'a') -0x7fff188d88f0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 
0x7fff188d88f0, 0x7fff188d89b0 } -Shifting token 'a' (0x7fff188d88f0 'a') -0x55c6363e8340->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d88f0 } -0x7fff188d88f0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d88f0 } +0x7ffcab66627f->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320 } +0x7ffcab666360->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'a' (0x7ffcab666360 'a') +0x7ffcab6662a0->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab6662a0, 0x7ffcab666360 } +Shifting token 'a' (0x7ffcab6662a0 'a') +0x5631526d5340->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab6662a0 } +0x7ffcab6662a0->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab6662a0 } Entering state 1 Stack now 0 10 10 10 1 -0x7fff188d89d0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340 } +0x7ffcab666380->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c6363e8340 'a') --> $$ = nterm item (0x7fff188d89d0 'a') -0x55c6363e8340->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d89d0 } -0x55c6363e8340->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89d0 } -0x7fff188d89d0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d89d0 } + $1 = token 'a' (0x5631526d5340 'a') +-> $$ = nterm item (0x7ffcab666380 'a') +0x5631526d5340->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab666380 } +0x5631526d5340->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666380 } +0x7ffcab666380->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab666380 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff188d88cf->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340 } -0x7fff188d89b0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'p' (0x7fff188d89b0 'p'Exception caught: cleaning lookahead and stack -0x55c6363e8340->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d89b0 } -0x55c6363e8320->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89b0 } -0x55c6363e8300->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89b0 } -0x55c6363e82e0->Object::~Object { 0x55c6363e82e0, 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x7fff188d89b0 } +0x7ffcab66627f->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340 } +0x7ffcab666360->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'p' (0x7ffcab666360 'p'Exception caught: cleaning lookahead and stack 
+0x5631526d5340->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab666360 } +0x5631526d5320->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666360 } +0x5631526d5300->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666360 } +0x5631526d52e0->Object::~Object { 0x5631526d52e0, 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x7ffcab666360 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -265674,105 +266351,104 @@ Entering state 0 Stack now 0 Reading a token -0x7fff188d88cf->Object::Object { } -0x7fff188d89b0->Object::Object { 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'a' (0x7fff188d89b0 'a') -0x7fff188d88f0->Object::Object { 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x7fff188d88f0, 0x7fff188d89b0 } -Shifting token 'a' (0x7fff188d88f0 'a') -0x55c6363e82e0->Object::Object { 0x7fff188d88f0 } -0x7fff188d88f0->Object::~Object { 0x55c6363e82e0, 0x7fff188d88f0 } +0x7ffcab66627f->Object::Object { } +0x7ffcab666360->Object::Object { 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'a' (0x7ffcab666360 'a') +0x7ffcab6662a0->Object::Object { 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x7ffcab6662a0, 0x7ffcab666360 } +Shifting token 'a' (0x7ffcab6662a0 'a') +0x5631526d52e0->Object::Object { 0x7ffcab6662a0 } +0x7ffcab6662a0->Object::~Object { 0x5631526d52e0, 0x7ffcab6662a0 } Entering state 1 Stack now 0 1 -0x7fff188d89d0->Object::Object { 0x55c6363e82e0 } +0x7ffcab666380->Object::Object { 0x5631526d52e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c6363e82e0 'a') --> $$ = nterm item (0x7fff188d89d0 'a') -0x55c6363e82e0->Object::~Object { 0x55c6363e82e0, 0x7fff188d89d0 } -0x55c6363e82e0->Object::Object { 0x7fff188d89d0 } -0x7fff188d89d0->Object::~Object { 0x55c6363e82e0, 0x7fff188d89d0 } + $1 = token 'a' (0x5631526d52e0 'a') +-> $$ = nterm item (0x7ffcab666380 'a') +0x5631526d52e0->Object::~Object { 0x5631526d52e0, 0x7ffcab666380 } +0x5631526d52e0->Object::Object { 0x7ffcab666380 } +0x7ffcab666380->Object::~Object { 0x5631526d52e0, 0x7ffcab666380 } Entering state 10 Stack now 0 10 Reading a token -0x7fff188d88cf->Object::Object { 0x55c6363e82e0 } -0x7fff188d89b0->Object::Object { 0x55c6363e82e0, 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x55c6363e82e0, 0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'a' (0x7fff188d89b0 'a') -0x7fff188d88f0->Object::Object { 0x55c6363e82e0, 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x55c6363e82e0, 0x7fff188d88f0, 0x7fff188d89b0 } -Shifting token 'a' (0x7fff188d88f0 'a') -0x55c6363e8300->Object::Object { 0x55c6363e82e0, 0x7fff188d88f0 } -0x7fff188d88f0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88f0 } +0x7ffcab66627f->Object::Object { 0x5631526d52e0 } +0x7ffcab666360->Object::Object { 0x5631526d52e0, 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x5631526d52e0, 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'a' (0x7ffcab666360 'a') +0x7ffcab6662a0->Object::Object { 0x5631526d52e0, 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x5631526d52e0, 0x7ffcab6662a0, 0x7ffcab666360 } +Shifting token 'a' (0x7ffcab6662a0 'a') +0x5631526d5300->Object::Object { 0x5631526d52e0, 0x7ffcab6662a0 } +0x7ffcab6662a0->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab6662a0 } Entering state 1 Stack now 0 10 1 
-0x7fff188d89d0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300 } +0x7ffcab666380->Object::Object { 0x5631526d52e0, 0x5631526d5300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c6363e8300 'a') --> $$ = nterm item (0x7fff188d89d0 'a') -0x55c6363e8300->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89d0 } -0x55c6363e8300->Object::Object { 0x55c6363e82e0, 0x7fff188d89d0 } -0x7fff188d89d0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89d0 } + $1 = token 'a' (0x5631526d5300 'a') +-> $$ = nterm item (0x7ffcab666380 'a') +0x5631526d5300->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666380 } +0x5631526d5300->Object::Object { 0x5631526d52e0, 0x7ffcab666380 } +0x7ffcab666380->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666380 } Entering state 10 Stack now 0 10 10 Reading a token -0x7fff188d88cf->Object::Object { 0x55c6363e82e0, 0x55c6363e8300 } -0x7fff188d89b0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'a' (0x7fff188d89b0 'a') -0x7fff188d88f0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88f0, 0x7fff188d89b0 } -Shifting token 'a' (0x7fff188d88f0 'a') -0x55c6363e8320->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d88f0 } -0x7fff188d88f0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d88f0 } +0x7ffcab66627f->Object::Object { 0x5631526d52e0, 0x5631526d5300 } +0x7ffcab666360->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'a' (0x7ffcab666360 'a') +0x7ffcab6662a0->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab6662a0, 0x7ffcab666360 } +Shifting token 'a' (0x7ffcab6662a0 'a') +0x5631526d5320->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab6662a0 } +0x7ffcab6662a0->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab6662a0 } Entering state 1 Stack now 0 10 10 1 -0x7fff188d89d0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320 } +0x7ffcab666380->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c6363e8320 'a') --> $$ = nterm item (0x7fff188d89d0 'a') -0x55c6363e8320->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89d0 } -0x55c6363e8320->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89d0 } -0x7fff188d89d0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89d0 } + $1 = token 'a' (0x5631526d5320 'a') +-> $$ = nterm item (0x7ffcab666380 'a') +0x5631526d5320->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666380 } +0x5631526d5320->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666380 } +0x7ffcab666380->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666380 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff188d88cf->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320 } -0x7fff188d89b0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 
0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'a' (0x7fff188d89b0 'a') -0x7fff188d88f0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d88f0, 0x7fff188d89b0 } -Shifting token 'a' (0x7fff188d88f0 'a') -0x55c6363e8340->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d88f0 } -0x7fff188d88f0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d88f0 } +0x7ffcab66627f->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320 } +0x7ffcab666360->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'a' (0x7ffcab666360 'a') +0x7ffcab6662a0->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab6662a0, 0x7ffcab666360 } +Shifting token 'a' (0x7ffcab6662a0 'a') +0x5631526d5340->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab6662a0 } +0x7ffcab6662a0->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab6662a0 } Entering state 1 Stack now 0 10 10 10 1 -0x7fff188d89d0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340 } +0x7ffcab666380->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c6363e8340 'a') --> $$ = nterm item (0x7fff188d89d0 'a') -0x55c6363e8340->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d89d0 } -0x55c6363e8340->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89d0 } -0x7fff188d89d0->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d89d0 } + $1 = token 'a' (0x5631526d5340 'a') +-> $$ = nterm item (0x7ffcab666380 'a') +0x5631526d5340->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab666380 } +0x5631526d5340->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666380 } +0x7ffcab666380->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab666380 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff188d88cf->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340 } -0x7fff188d89b0->Object::Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d88cf } -0x7fff188d88cf->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d88cf, 0x7fff188d89b0 } -Next token is token 'p' (0x7fff188d89b0 'p'Exception caught: cleaning lookahead and stack -0x55c6363e8340->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x55c6363e8340, 0x7fff188d89b0 } -0x55c6363e8320->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x55c6363e8320, 0x7fff188d89b0 } -0x55c6363e8300->Object::~Object { 0x55c6363e82e0, 0x55c6363e8300, 0x7fff188d89b0 } -0x55c6363e82e0->Object::~Object { 0x55c6363e82e0, 0x7fff188d89b0 } -0x7fff188d89b0->Object::~Object { 0x7fff188d89b0 } +0x7ffcab66627f->Object::Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340 } +0x7ffcab666360->Object::Object { 0x5631526d52e0, 0x5631526d5300, 
0x5631526d5320, 0x5631526d5340, 0x7ffcab66627f } +0x7ffcab66627f->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab66627f, 0x7ffcab666360 } +Next token is token 'p' (0x7ffcab666360 'p'Exception caught: cleaning lookahead and stack +0x5631526d5340->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x5631526d5340, 0x7ffcab666360 } +0x5631526d5320->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x5631526d5320, 0x7ffcab666360 } +0x5631526d5300->Object::~Object { 0x5631526d52e0, 0x5631526d5300, 0x7ffcab666360 } +0x5631526d52e0->Object::~Object { 0x5631526d52e0, 0x7ffcab666360 } +0x7ffcab666360->Object::~Object { 0x7ffcab666360 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr stdout: exception caught: printer - ./c++.at:1363: $PREPARSER ./input aaaae stderr: exception caught: syntax error @@ -265783,509 +266459,321 @@ ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaT stderr: -stdout: -stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:845: $PREPARSER ./glr-regr7 -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: ./c++.at:1363: $PREPARSER ./input aaaaR -memory exhausted -./glr-regression.at:845: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -737. glr-regression.at:845: ok ./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ... -./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y - -./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS -760. glr-regression.at:1786: testing Leaked semantic values when reporting ambiguity: glr.cc ... -./glr-regression.at:1786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y -./glr-regression.at:1786: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS stderr: stdout: -./glr-regression.at:1678: $PREPARSER ./glr-regr14 -stderr: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./glr-regression.at:1678: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./glr-regression.at:1445: $PREPARSER ./glr-regr13 +./c++.at:856: $PREPARSER ./input stderr: -756. glr-regression.at:1678: ok -./glr-regression.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -753. 
glr-regression.at:1445: ok - +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1555: $PREPARSER ./test - -stderr: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none stderr: stdout: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -======== Testing with C++ standard flags: '' -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -761. glr-regression.at:1787: testing Leaked semantic values when reporting ambiguity: glr2.cc ... -./c++.at:1064: $PREPARSER ./input < in -./glr-regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y +./c++.at:92: $PREPARSER ./input stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +663. c++.at:26: ok stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: -762. glr-regression.at:1860: testing Leaked lookahead after nondeterministic parse syntax error: glr.c ... 
-./glr-regression.at:1860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.c glr-regr16.y stdout: -./c++.at:1360: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./glr-regression.at:1787: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal -stderr: -stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1860: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr16 glr-regr16.c $LIBS stdout: -./c++.at:1360: $PREPARSER ./input i -stderr: -./c++.at:1555: ./check -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -./c++.at:1360: $PREPARSER ./input aaaap -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input --debug aaaap +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:659: $PREPARSER ./input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x556e986eaf40->Object::Object { } -Next token is token 'a' (0x556e986eaf40 'a') -Shifting token 'a' (0x556e986eaf40 'a') +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 Entering state 2 Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556e986eaf40 'a') --> $$ = nterm item (0x556e986eaf40 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x556e986eaf90->Object::Object { 0x556e986eaf40 } -Next token is token 'a' (0x556e986eaf90 'a') -Shifting token 'a' (0x556e986eaf90 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556e986eaf90 'a') --> $$ = nterm item (0x556e986eaf90 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x556e986eafe0->Object::Object { 0x556e986eaf40, 0x556e986eaf90 } -Next token is token 'a' (0x556e986eafe0 'a') -Shifting token 'a' (0x556e986eafe0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556e986eafe0 'a') --> $$ = nterm item (0x556e986eafe0 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x556e986eb030->Object::Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0 } -Next token is token 'a' (0x556e986eb030 'a') -Shifting token 'a' (0x556e986eb030 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556e986eb030 'a') --> $$ = nterm item (0x556e986eb030 'a') -Entering state 11 -Stack now 0 11 11 11 11 Reading a token -0x556e986eb080->Object::Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0, 0x556e986eb030 } -Next token is token 'p' (0x556e986eb080 'p'Exception caught: cleaning lookahead and stack -0x556e986eb080->Object::~Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0, 0x556e986eb030, 0x556e986eb080 } -0x556e986eb030->Object::~Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0, 0x556e986eb030 } -0x556e986eafe0->Object::~Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0 } -0x556e986eaf90->Object::~Object { 
0x556e986eaf40, 0x556e986eaf90 } -0x556e986eaf40->Object::~Object { 0x556e986eaf40 } -exception caught: printer -end { } -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x556e986eaf40->Object::Object { } -Next token is token 'a' (0x556e986eaf40 'a') -Shifting token 'a' (0x556e986eaf40 'a') +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 Entering state 2 Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556e986eaf40 'a') --> $$ = nterm item (0x556e986eaf40 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x556e986eaf90->Object::Object { 0x556e986eaf40 } -Next token is token 'a' (0x556e986eaf90 'a') -Shifting token 'a' (0x556e986eaf90 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556e986eaf90 'a') --> $$ = nterm item (0x556e986eaf90 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x556e986eafe0->Object::Object { 0x556e986eaf40, 0x556e986eaf90 } -Next token is token 'a' (0x556e986eafe0 'a') -Shifting token 'a' (0x556e986eafe0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556e986eafe0 'a') --> $$ = nterm item (0x556e986eafe0 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x556e986eb030->Object::Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0 } -Next token is token 'a' (0x556e986eb030 'a') -Shifting token 'a' (0x556e986eb030 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556e986eb030 'a') --> $$ = nterm item (0x556e986eb030 'a') -Entering state 11 -Stack now 0 11 11 11 11 Reading a token -0x556e986eb080->Object::Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0, 0x556e986eb030 } -Next token is token 'p' (0x556e986eb080 'p'Exception caught: cleaning lookahead and stack -0x556e986eb080->Object::~Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0, 0x556e986eb030, 0x556e986eb080 } -0x556e986eb030->Object::~Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0, 0x556e986eb030 } -0x556e986eafe0->Object::~Object { 0x556e986eaf40, 0x556e986eaf90, 0x556e986eafe0 } -0x556e986eaf90->Object::~Object { 0x556e986eaf40, 0x556e986eaf90 } -0x556e986eaf40->Object::~Object { 0x556e986eaf40 } -exception caught: printer -end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -689. 
c++.at:1371: ok -stderr: -stderr: -exception caught: syntax error -stdout: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1446: $PREPARSER ./glr-regr13 -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./glr-regression.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -stdout: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1311: $PREPARSER ./glr-regr12 - -754. glr-regression.at:1446: ok -stderr: -./c++.at:1360: $PREPARSER ./input aaaaT -./glr-regression.at:1311: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -751. glr-regression.at:1311: ok -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR - -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -763. glr-regression.at:1861: testing Leaked lookahead after nondeterministic parse syntax error: glr.cc ... -./glr-regression.at:1861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y -764. glr-regression.at:1862: testing Leaked lookahead after nondeterministic parse syntax error: glr2.cc ... -./glr-regression.at:1862: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y -765. glr-regression.at:1964: testing Uninitialized location when reporting ambiguity: glr.c api.pure ... 
-./glr-regression.at:1964: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.c glr-regr17.y +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: ./c++.at:1361: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1861: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS -./glr-regression.at:1862: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS ./c++.at:1361: $PREPARSER ./input aaaal stderr: exception caught: yylex ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1964: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr17 glr-regr17.c $LIBS +stderr: ./c++.at:1361: $PREPARSER ./input i +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./c++.at:1065: $PREPARSER ./input < in stderr: exception caught: initial-action ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaap stderr: +./c++.at:1065: $PREPARSER ./input < in ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +error: invalid expression +./c++.at:850: $PREPARSER ./input +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1065: $PREPARSER ./input < in Starting parse Entering state 0 Stack now 0 Reading a token -0x55ff4daebf40->Object::Object { } -Next token is token 'a' (0x55ff4daebf40 'a') -Shifting token 'a' (0x55ff4daebf40 'a') +0x55604fcdff40->Object::Object { } +Next token is token 'a' (0x55604fcdff40 'a') +Shifting token 'a' (0x55604fcdff40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ff4daebf40 'a') --> $$ = nterm item (0x55ff4daebf40 'a') + $1 = token 'a' (0x55604fcdff40 'a') +-> $$ = nterm item (0x55604fcdff40 'a') Entering state 10 Stack now 0 10 Reading a token -0x55ff4daebf90->Object::Object { 0x55ff4daebf40 } -Next token is token 'a' (0x55ff4daebf90 'a') -Shifting token 'a' (0x55ff4daebf90 'a') +0x55604fcdff90->Object::Object { 0x55604fcdff40 } +Next token is token 'a' (0x55604fcdff90 'a') +Shifting token 'a' (0x55604fcdff90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ff4daebf90 'a') --> $$ = nterm item (0x55ff4daebf90 'a') + $1 = token 'a' (0x55604fcdff90 'a') +-> $$ = nterm item (0x55604fcdff90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x55ff4daebfe0->Object::Object { 0x55ff4daebf40, 0x55ff4daebf90 } -Next token is token 'a' (0x55ff4daebfe0 'a') -Shifting token 'a' (0x55ff4daebfe0 'a') +0x55604fcdffe0->Object::Object { 0x55604fcdff40, 0x55604fcdff90 } +Next token is token 'a' (0x55604fcdffe0 'a') +Shifting token 'a' (0x55604fcdffe0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ff4daebfe0 'a') --> $$ = nterm item (0x55ff4daebfe0 'a') + $1 = token 'a' (0x55604fcdffe0 
'a') +-> $$ = nterm item (0x55604fcdffe0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x55ff4daec030->Object::Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0 } -Next token is token 'a' (0x55ff4daec030 'a') -Shifting token 'a' (0x55ff4daec030 'a') +0x55604fce0030->Object::Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0 } +Next token is token 'a' (0x55604fce0030 'a') +Shifting token 'a' (0x55604fce0030 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ff4daec030 'a') --> $$ = nterm item (0x55ff4daec030 'a') + $1 = token 'a' (0x55604fce0030 'a') +-> $$ = nterm item (0x55604fce0030 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x55ff4daec080->Object::Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0, 0x55ff4daec030 } -Next token is token 'p' (0x55ff4daec080 'p'Exception caught: cleaning lookahead and stack -0x55ff4daec080->Object::~Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0, 0x55ff4daec030, 0x55ff4daec080 } -0x55ff4daec030->Object::~Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0, 0x55ff4daec030 } -0x55ff4daebfe0->Object::~Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0 } -0x55ff4daebf90->Object::~Object { 0x55ff4daebf40, 0x55ff4daebf90 } -0x55ff4daebf40->Object::~Object { 0x55ff4daebf40 } +0x55604fce0080->Object::Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0, 0x55604fce0030 } +Next token is token 'p' (0x55604fce0080 'p'Exception caught: cleaning lookahead and stack +0x55604fce0080->Object::~Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0, 0x55604fce0030, 0x55604fce0080 } +0x55604fce0030->Object::~Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0, 0x55604fce0030 } +0x55604fcdffe0->Object::~Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0 } +0x55604fcdff90->Object::~Object { 0x55604fcdff40, 0x55604fcdff90 } +0x55604fcdff40->Object::~Object { 0x55604fcdff40 } exception caught: printer end { } ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x55ff4daebf40->Object::Object { } -Next token is token 'a' (0x55ff4daebf40 'a') -Shifting token 'a' (0x55ff4daebf40 'a') +0x55604fcdff40->Object::Object { } +Next token is token 'a' (0x55604fcdff40 'a') +Shifting token 'a' (0x55604fcdff40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ff4daebf40 'a') --> $$ = nterm item (0x55ff4daebf40 'a') + $1 = token 'a' (0x55604fcdff40 'a') +-> $$ = nterm item (0x55604fcdff40 'a') Entering state 10 Stack now 0 10 Reading a token -0x55ff4daebf90->Object::Object { 0x55ff4daebf40 } -Next token is token 'a' (0x55ff4daebf90 'a') -Shifting token 'a' (0x55ff4daebf90 'a') +0x55604fcdff90->Object::Object { 0x55604fcdff40 } +Next token is token 'a' (0x55604fcdff90 'a') +Shifting token 'a' (0x55604fcdff90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ff4daebf90 'a') --> $$ = nterm item (0x55ff4daebf90 'a') + $1 = token 'a' (0x55604fcdff90 'a') +-> $$ = nterm item (0x55604fcdff90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x55ff4daebfe0->Object::Object { 0x55ff4daebf40, 0x55ff4daebf90 } -Next token is token 'a' (0x55ff4daebfe0 'a') -Shifting token 'a' (0x55ff4daebfe0 'a') +0x55604fcdffe0->Object::Object { 
0x55604fcdff40, 0x55604fcdff90 } +Next token is token 'a' (0x55604fcdffe0 'a') +Shifting token 'a' (0x55604fcdffe0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ff4daebfe0 'a') --> $$ = nterm item (0x55ff4daebfe0 'a') + $1 = token 'a' (0x55604fcdffe0 'a') +-> $$ = nterm item (0x55604fcdffe0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x55ff4daec030->Object::Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0 } -Next token is token 'a' (0x55ff4daec030 'a') -Shifting token 'a' (0x55ff4daec030 'a') +0x55604fce0030->Object::Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0 } +Next token is token 'a' (0x55604fce0030 'a') +Shifting token 'a' (0x55604fce0030 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ff4daec030 'a') --> $$ = nterm item (0x55ff4daec030 'a') + $1 = token 'a' (0x55604fce0030 'a') +-> $$ = nterm item (0x55604fce0030 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x55ff4daec080->Object::Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0, 0x55ff4daec030 } -Next token is token 'p' (0x55ff4daec080 'p'Exception caught: cleaning lookahead and stack -0x55ff4daec080->Object::~Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0, 0x55ff4daec030, 0x55ff4daec080 } -0x55ff4daec030->Object::~Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0, 0x55ff4daec030 } -0x55ff4daebfe0->Object::~Object { 0x55ff4daebf40, 0x55ff4daebf90, 0x55ff4daebfe0 } -0x55ff4daebf90->Object::~Object { 0x55ff4daebf40, 0x55ff4daebf90 } -0x55ff4daebf40->Object::~Object { 0x55ff4daebf40 } +0x55604fce0080->Object::Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0, 0x55604fce0030 } +Next token is token 'p' (0x55604fce0080 'p'Exception caught: cleaning lookahead and stack +0x55604fce0080->Object::~Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0, 0x55604fce0030, 0x55604fce0080 } +0x55604fce0030->Object::~Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0, 0x55604fce0030 } +0x55604fcdffe0->Object::~Object { 0x55604fcdff40, 0x55604fcdff90, 0x55604fcdffe0 } +0x55604fcdff90->Object::~Object { 0x55604fcdff40, 0x55604fcdff90 } +0x55604fcdff40->Object::~Object { 0x55604fcdff40 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr +error: invalid character +stdout: +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in stdout: exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae stderr: +stderr: +======== Testing with C++ standard flags: '' +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: syntax error +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in ./c++.at:1361: $PREPARSER ./input aaaaE stderr: +stderr: exception caught: syntax error, unexpected end of file, expecting 'a' +error: invalid expression ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in ./c++.at:1361: $PREPARSER ./input aaaaT stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid 
character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaaR stderr: +======== Testing with C++ standard flags: '' ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:856: $PREPARSER ./input -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:740: $PREPARSER ./glr-regr6 -stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous +./c++.at:566: $here/modern stderr: stdout: -./c++.at:659: $PREPARSER ./input -./glr-regression.at:740: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -734. glr-regression.at:740: ok -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - -766. glr-regression.at:1965: testing Uninitialized location when reporting ambiguity: glr.cc ... 
-./glr-regression.at:1965: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: stdout: -./c++.at:574: $here/modern +./c++.at:568: $here/modern +Modern C++: 201402 +./c++.at:566: $PREPARSER ./list stdout: -Modern C++: 201103 -./c++.at:574: $PREPARSER ./list +Modern C++: 201402 stderr: -./glr-regression.at:1965: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS +./c++.at:568: $PREPARSER ./list +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Destroy: "" Destroy: "" @@ -266308,519 +266796,267 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) -stdout: -./glr-regression.at:1785: $PREPARSER ./glr-regr15 -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Ambiguity detected. -Option 1, - ambiguity -> - ambiguity1 -> - -Option 2, - ambiguity -> - ambiguity2 -> - -syntax is ambiguous -./glr-regression.at:1785: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -759. glr-regression.at:1785: ok -stdout: -./glr-regression.at:1038: $PREPARSER ./glr-regr9 -stderr: -memory exhausted -./glr-regression.at:1038: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -743. glr-regression.at:1038: ok - -767. glr-regression.at:1966: testing Uninitialized location when reporting ambiguity: glr2.cc ... -./glr-regression.at:1966: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -768. glr-regression.at:2035: testing Missed %merge type warnings when LHS type is declared later: glr.c ... -./glr-regression.at:2035: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y -768. 
glr-regression.at:2035: ok -./glr-regression.at:1966: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: - stdout: -./c++.at:1362: $PREPARSER ./input aaaas +./c++.at:1360: $PREPARSER ./input aaaas stderr: exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal stderr: exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -769. glr-regression.at:2036: testing Missed %merge type warnings when LHS type is declared later: glr.cc ... -./glr-regression.at:2036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y -./c++.at:1362: $PREPARSER ./input aaaap +stdout: +./c++.at:235: $PREPARSER ./list +./c++.at:1360: $PREPARSER ./input i stderr: stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: initial-action +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: -./c++.at:1362: $PREPARSER ./input --debug aaaap -./glr-regression.at:1104: $PREPARSER ./glr-regr10 -769. 
glr-regression.at:2036: ok +======== Testing with C++ standard flags: '' +./c++.at:1066: $PREPARSER ./input < in +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1360: $PREPARSER ./input aaaap stderr: -./glr-regression.at:1104: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input --debug aaaap +./c++.at:1066: $PREPARSER ./input < in stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffe6fdc76c7->Object::Object { } -0x7ffe6fdc7760->Object::Object { 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'a' (0x7ffe6fdc7760 'a') -0x7ffe6fdc76a0->Object::Object { 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x7ffe6fdc76a0, 0x7ffe6fdc7760 } -Shifting token 'a' (0x7ffe6fdc76a0 'a') -0x564e47f8f2e0->Object::Object { 0x7ffe6fdc76a0 } -0x7ffe6fdc76a0->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc76a0 } +0x55d75ee3bf40->Object::Object { } +Next token is token 'a' (0x55d75ee3bf40 'a') +Shifting token 'a' (0x55d75ee3bf40 'a') Entering state 2 Stack now 0 2 -0x7ffe6fdc7780->Object::Object { 0x564e47f8f2e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x564e47f8f2e0 'a') --> $$ = nterm item (0x7ffe6fdc7780 'a') -0x564e47f8f2e0->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc7780 } -0x564e47f8f2e0->Object::Object { 0x7ffe6fdc7780 } -0x7ffe6fdc7780->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc7780 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d75ee3bf40 'a') +-> $$ = nterm item (0x55d75ee3bf40 'a') Entering state 11 Stack now 0 11 Reading a token -0x7ffe6fdc76c7->Object::Object { 0x564e47f8f2e0 } -0x7ffe6fdc7760->Object::Object { 0x564e47f8f2e0, 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'a' (0x7ffe6fdc7760 'a') -0x7ffe6fdc76a0->Object::Object { 0x564e47f8f2e0, 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc76a0, 0x7ffe6fdc7760 } -Shifting token 'a' (0x7ffe6fdc76a0 'a') -0x564e47f8f300->Object::Object { 0x564e47f8f2e0, 0x7ffe6fdc76a0 } -0x7ffe6fdc76a0->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76a0 } +0x55d75ee3bf90->Object::Object { 0x55d75ee3bf40 } +Next token is token 'a' (0x55d75ee3bf90 'a') +Shifting token 'a' (0x55d75ee3bf90 'a') Entering state 2 Stack now 0 11 2 -0x7ffe6fdc7780->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x564e47f8f300 'a') --> $$ = nterm item (0x7ffe6fdc7780 'a') -0x564e47f8f300->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7780 } -0x564e47f8f300->Object::Object { 0x564e47f8f2e0, 0x7ffe6fdc7780 } -0x7ffe6fdc7780->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7780 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d75ee3bf90 'a') +-> $$ = nterm item (0x55d75ee3bf90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x7ffe6fdc76c7->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300 } -0x7ffe6fdc7760->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'a' (0x7ffe6fdc7760 'a') 
-0x7ffe6fdc76a0->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76a0, 0x7ffe6fdc7760 } -Shifting token 'a' (0x7ffe6fdc76a0 'a') -0x564e47f8f320->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76a0 } -0x7ffe6fdc76a0->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76a0 } +0x55d75ee3bfe0->Object::Object { 0x55d75ee3bf40, 0x55d75ee3bf90 } +Next token is token 'a' (0x55d75ee3bfe0 'a') +Shifting token 'a' (0x55d75ee3bfe0 'a') Entering state 2 Stack now 0 11 11 2 -0x7ffe6fdc7780->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x564e47f8f320 'a') --> $$ = nterm item (0x7ffe6fdc7780 'a') -0x564e47f8f320->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7780 } -0x564e47f8f320->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7780 } -0x7ffe6fdc7780->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7780 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d75ee3bfe0 'a') +-> $$ = nterm item (0x55d75ee3bfe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffe6fdc76c7->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320 } -0x7ffe6fdc7760->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'a' (0x7ffe6fdc7760 'a') -0x7ffe6fdc76a0->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76a0, 0x7ffe6fdc7760 } -Shifting token 'a' (0x7ffe6fdc76a0 'a') -0x564e47f8f340->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76a0 } -0x7ffe6fdc76a0->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc76a0 } +0x55d75ee3c030->Object::Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0 } +Next token is token 'a' (0x55d75ee3c030 'a') +Shifting token 'a' (0x55d75ee3c030 'a') Entering state 2 Stack now 0 11 11 11 2 -0x7ffe6fdc7780->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x564e47f8f340 'a') --> $$ = nterm item (0x7ffe6fdc7780 'a') -0x564e47f8f340->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc7780 } -0x564e47f8f340->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7780 } -0x7ffe6fdc7780->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc7780 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d75ee3c030 'a') +-> $$ = nterm item (0x55d75ee3c030 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffe6fdc76c7->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340 } -0x7ffe6fdc7760->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'p' (0x7ffe6fdc7760 'p'Exception caught: cleaning lookahead and stack -0x564e47f8f340->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc7760 } 
-0x564e47f8f320->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7760 } -0x564e47f8f300->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7760 } -0x564e47f8f2e0->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x7ffe6fdc7760 } +0x55d75ee3c080->Object::Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0, 0x55d75ee3c030 } +Next token is token 'p' (0x55d75ee3c080 'p'Exception caught: cleaning lookahead and stack +0x55d75ee3c080->Object::~Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0, 0x55d75ee3c030, 0x55d75ee3c080 } +0x55d75ee3c030->Object::~Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0, 0x55d75ee3c030 } +0x55d75ee3bfe0->Object::~Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0 } +0x55d75ee3bf90->Object::~Object { 0x55d75ee3bf40, 0x55d75ee3bf90 } +0x55d75ee3bf40->Object::~Object { 0x55d75ee3bf40 } exception caught: printer end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -746. glr-regression.at:1104: ok Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffe6fdc76c7->Object::Object { } -0x7ffe6fdc7760->Object::Object { 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'a' (0x7ffe6fdc7760 'a') -0x7ffe6fdc76a0->Object::Object { 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x7ffe6fdc76a0, 0x7ffe6fdc7760 } -Shifting token 'a' (0x7ffe6fdc76a0 'a') -0x564e47f8f2e0->Object::Object { 0x7ffe6fdc76a0 } -0x7ffe6fdc76a0->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc76a0 } +0x55d75ee3bf40->Object::Object { } +Next token is token 'a' (0x55d75ee3bf40 'a') +Shifting token 'a' (0x55d75ee3bf40 'a') Entering state 2 Stack now 0 2 -0x7ffe6fdc7780->Object::Object { 0x564e47f8f2e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x564e47f8f2e0 'a') --> $$ = nterm item (0x7ffe6fdc7780 'a') -0x564e47f8f2e0->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc7780 } -0x564e47f8f2e0->Object::Object { 0x7ffe6fdc7780 } -0x7ffe6fdc7780->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc7780 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d75ee3bf40 'a') +-> $$ = nterm item (0x55d75ee3bf40 'a') Entering state 11 Stack now 0 11 Reading a token -0x7ffe6fdc76c7->Object::Object { 0x564e47f8f2e0 } -0x7ffe6fdc7760->Object::Object { 0x564e47f8f2e0, 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'a' (0x7ffe6fdc7760 'a') -0x7ffe6fdc76a0->Object::Object { 0x564e47f8f2e0, 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc76a0, 0x7ffe6fdc7760 } -Shifting token 'a' (0x7ffe6fdc76a0 'a') -0x564e47f8f300->Object::Object { 0x564e47f8f2e0, 0x7ffe6fdc76a0 } -0x7ffe6fdc76a0->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76a0 } +0x55d75ee3bf90->Object::Object { 0x55d75ee3bf40 } +Next token is token 'a' (0x55d75ee3bf90 'a') +Shifting token 'a' (0x55d75ee3bf90 'a') Entering state 2 Stack now 0 11 2 -0x7ffe6fdc7780->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x564e47f8f300 'a') --> $$ = nterm item (0x7ffe6fdc7780 'a') -0x564e47f8f300->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7780 } -0x564e47f8f300->Object::Object { 
0x564e47f8f2e0, 0x7ffe6fdc7780 } -0x7ffe6fdc7780->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7780 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d75ee3bf90 'a') +-> $$ = nterm item (0x55d75ee3bf90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x7ffe6fdc76c7->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300 } -0x7ffe6fdc7760->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'a' (0x7ffe6fdc7760 'a') -0x7ffe6fdc76a0->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76a0, 0x7ffe6fdc7760 } -Shifting token 'a' (0x7ffe6fdc76a0 'a') -0x564e47f8f320->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc76a0 } -0x7ffe6fdc76a0->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76a0 } +0x55d75ee3bfe0->Object::Object { 0x55d75ee3bf40, 0x55d75ee3bf90 } +Next token is token 'a' (0x55d75ee3bfe0 'a') +Shifting token 'a' (0x55d75ee3bfe0 'a') Entering state 2 Stack now 0 11 11 2 -0x7ffe6fdc7780->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x564e47f8f320 'a') --> $$ = nterm item (0x7ffe6fdc7780 'a') -0x564e47f8f320->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7780 } -0x564e47f8f320->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7780 } -0x7ffe6fdc7780->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7780 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d75ee3bfe0 'a') +-> $$ = nterm item (0x55d75ee3bfe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffe6fdc76c7->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320 } -0x7ffe6fdc7760->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'a' (0x7ffe6fdc7760 'a') -0x7ffe6fdc76a0->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76a0, 0x7ffe6fdc7760 } -Shifting token 'a' (0x7ffe6fdc76a0 'a') -0x564e47f8f340->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc76a0 } -0x7ffe6fdc76a0->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc76a0 } +0x55d75ee3c030->Object::Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0 } +Next token is token 'a' (0x55d75ee3c030 'a') +Shifting token 'a' (0x55d75ee3c030 'a') Entering state 2 Stack now 0 11 11 11 2 -0x7ffe6fdc7780->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x564e47f8f340 'a') --> $$ = nterm item (0x7ffe6fdc7780 'a') -0x564e47f8f340->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc7780 } -0x564e47f8f340->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7780 } -0x7ffe6fdc7780->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc7780 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d75ee3c030 'a') +-> $$ = nterm item (0x55d75ee3c030 'a') Entering state 11 Stack now 0 
11 11 11 11 Reading a token -0x7ffe6fdc76c7->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340 } -0x7ffe6fdc7760->Object::Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc76c7 } -0x7ffe6fdc76c7->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc76c7, 0x7ffe6fdc7760 } -Next token is token 'p' (0x7ffe6fdc7760 'p'Exception caught: cleaning lookahead and stack -0x564e47f8f340->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x564e47f8f340, 0x7ffe6fdc7760 } -0x564e47f8f320->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x564e47f8f320, 0x7ffe6fdc7760 } -0x564e47f8f300->Object::~Object { 0x564e47f8f2e0, 0x564e47f8f300, 0x7ffe6fdc7760 } -0x564e47f8f2e0->Object::~Object { 0x564e47f8f2e0, 0x7ffe6fdc7760 } -0x7ffe6fdc7760->Object::~Object { 0x7ffe6fdc7760 } +0x55d75ee3c080->Object::Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0, 0x55d75ee3c030 } +Next token is token 'p' (0x55d75ee3c080 'p'Exception caught: cleaning lookahead and stack +0x55d75ee3c080->Object::~Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0, 0x55d75ee3c030, 0x55d75ee3c080 } +0x55d75ee3c030->Object::~Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0, 0x55d75ee3c030 } +0x55d75ee3bfe0->Object::~Object { 0x55d75ee3bf40, 0x55d75ee3bf90, 0x55d75ee3bfe0 } +0x55d75ee3bf90->Object::~Object { 0x55d75ee3bf40, 0x55d75ee3bf90 } +0x55d75ee3bf40->Object::~Object { 0x55d75ee3bf40 } exception caught: printer end { } -./c++.at:1362: grep '^exception caught: printer$' stderr +./c++.at:1360: grep '^exception caught: printer$' stderr +./c++.at:1066: $PREPARSER ./input < in stdout: exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae - -stderr: - -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -771. glr-regression.at:2149: testing Ambiguity reports: glr.c ... -./glr-regression.at:2149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -770. glr-regression.at:2037: testing Missed %merge type warnings when LHS type is declared later: glr2.cc ... -./glr-regression.at:2037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y -stderr: -stdout: -./c++.at:1362: $PREPARSER ./input aaaaT -./glr-regression.at:1679: $PREPARSER ./glr-regr14 -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1362: $PREPARSER ./input aaaaR -770. glr-regression.at:2037: ok -./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:92: $PREPARSER ./input -757. glr-regression.at:1679: stderr: - ok -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - - -663. 
c++.at:26: ok -./glr-regression.at:2149: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -772. glr-regression.at:2150: testing Ambiguity reports: glr.cc ... -./glr-regression.at:2150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: -./glr-regression.at:1860: $PREPARSER ./glr-regr16 -774. glr-regression.at:2229: testing Predicates: glr.c ... -./glr-regression.at:2229: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stderr: -syntax error -./glr-regression.at:1860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -773. glr-regression.at:2151: testing Ambiguity reports: glr2.cc ... -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -762. glr-regression.at:1860: ./glr-regression.at:2151: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - ok - -./glr-regression.at:2150: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./glr-regression.at:2229: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./glr-regression.at:2151: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -775. glr-regression.at:2230: testing Predicates: glr.cc ... -./glr-regression.at:2230: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: -./glr-regression.at:1861: $PREPARSER ./glr-regr16 -stderr: -syntax error -./glr-regression.at:1861: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -763. glr-regression.at:1861: ok -./glr-regression.at:2230: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - -stderr: -stdout: -./glr-regression.at:1964: $PREPARSER ./glr-regr17 -stderr: -Ambiguity detected. -Option 1, - start -> - ambig1 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty1 -> - -Option 2, - start -> - ambig2 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty2 -> - -1.1-2.2: syntax is ambiguous -./glr-regression.at:1964: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaae stderr: -stdout: -./glr-regression.at:946: $PREPARSER ./glr-regr8 -765. glr-regression.at:1964: stderr: - ok -./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: stderr: -./c++.at:856: $PREPARSER ./input +exception caught: syntax error stdout: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:941: $PREPARSER ./input +======== Testing with C++ standard flags: '' stderr: -740. glr-regression.at:946: ok -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./c++.at:1360: $PREPARSER ./input aaaaE syntax error Discarding 'a'. Reducing 'a'. ./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -776. glr-regression.at:2231: testing Predicates: glr2.cc ... 
-./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 681. c++.at:884: ok -./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1360: $PREPARSER ./input aaaaT stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1360: $PREPARSER ./input aaaaR stdout: -./c++.at:850: $PREPARSER ./input +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -stderr: -./c++.at:235: $PREPARSER ./list +./c++.at:567: $here/modern stdout: -./glr-regression.at:1176: $PREPARSER ./glr-regr11 -stderr: -./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -749. glr-regression.at:1176: ======== Testing with C++ standard flags: '' - ok -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +Modern C++: 201402 +./c++.at:567: $PREPARSER ./list stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./glr-regression.at:1786: $PREPARSER ./glr-regr15 +./c++.at:856: $PREPARSER ./input stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -Ambiguity detected. -Option 1, - ambiguity -> - ambiguity1 -> - -Option 2, - ambiguity -> - ambiguity2 -> - -syntax is ambiguous -./glr-regression.at:1786: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1312: $PREPARSER ./glr-regr12 -stderr: -./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -760. glr-regression.at:1786: ok -752. 
glr-regression.at:1312: ok stderr: +./c++.at:570: $here/modern stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaas stderr: stdout: -./glr-regression.at:2149: $PREPARSER ./input --debug +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stdout: +Modern C++: 201402 stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token 'a' () -Shifting token 'a' () -Entering state 1 -Reading a token -Next token is token 'b' () -Shifting token 'b' () -Entering state 3 -Reducing stack 0 by rule 3 (line 30): - $1 = token 'b' () --> $$ = nterm b () -Entering state 4 -Reading a token -Next token is token 'c' () -Shifting token 'c' () -Entering state 6 -Reducing stack 0 by rule 4 (line 31): --> $$ = nterm d () -Entering state 7 -Reading a token -Now at end of input. -Stack 0 Entering state 7 -Now at end of input. -Splitting off stack 1 from 0. -Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. -Stack 1 Entering state 2 -Now at end of input. -Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. -Merging stack 0 into stack 1. -Stack 1 Entering state 2 -Now at end of input. -Removing dead stacks. -Rename stack 1 -> 0. -On stack 0, shifting token "end of file" () -Stack 0 now in state 5 -Ambiguity detected. -Option 1, - start -> - 'a' - b - 'c' - d - -Option 2, - start -> - 'a' - b - 'c' - d - -syntax is ambiguous -Cleanup: popping token "end of file" () -Cleanup: popping unresolved nterm start () -Cleanup: popping nterm d () -Cleanup: popping token 'c' () -Cleanup: popping nterm b () -Cleanup: popping token 'a' () -./glr-regression.at:2149: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -771. glr-regression.at:2149: ok +./c++.at:570: $PREPARSER ./list +exception caught: reduction stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:566: $here/modern -stdout: -Modern C++: 201703 -./c++.at:566: $PREPARSER ./list stderr: +./c++.at:850: $PREPARSER ./input Destroy: "0" Destroy: "0" Destroy: 1 @@ -266842,15 +267078,267 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:568: $here/modern +./c++.at:1362: $PREPARSER ./input i +stderr: +stdout: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +stderr: +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaap +stderr: +./c++.at:1362: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffe8a6d0ab7->Object::Object { } +0x7ffe8a6d0b50->Object::Object { 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x7ffe8a6d0ab7, 0x7ffe8a6d0b50 } +Next token is token 'a' (0x7ffe8a6d0b50 'a') +0x7ffe8a6d0a90->Object::Object { 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x7ffe8a6d0a90, 0x7ffe8a6d0b50 } +Shifting token 'a' (0x7ffe8a6d0a90 'a') +0x555c09ebc2e0->Object::Object { 0x7ffe8a6d0a90 } +0x7ffe8a6d0a90->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0a90 } +Entering state 2 +Stack now 0 2 +0x7ffe8a6d0b70->Object::Object { 0x555c09ebc2e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x555c09ebc2e0 'a') +-> $$ = nterm item (0x7ffe8a6d0b70 'a') +0x555c09ebc2e0->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0b70 } +0x555c09ebc2e0->Object::Object { 0x7ffe8a6d0b70 } +0x7ffe8a6d0b70->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0b70 } +Entering state 11 +Stack now 0 11 +Reading a token +0x7ffe8a6d0ab7->Object::Object { 0x555c09ebc2e0 } +0x7ffe8a6d0b50->Object::Object { 0x555c09ebc2e0, 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0ab7, 0x7ffe8a6d0b50 } +Next token is token 'a' (0x7ffe8a6d0b50 'a') +0x7ffe8a6d0a90->Object::Object { 0x555c09ebc2e0, 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0a90, 0x7ffe8a6d0b50 } +Shifting token 'a' (0x7ffe8a6d0a90 'a') +0x555c09ebc300->Object::Object { 0x555c09ebc2e0, 0x7ffe8a6d0a90 } +0x7ffe8a6d0a90->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0a90 } +Entering state 2 +Stack now 0 11 2 +0x7ffe8a6d0b70->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x555c09ebc300 'a') +-> $$ = nterm item (0x7ffe8a6d0b70 'a') +0x555c09ebc300->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b70 } +0x555c09ebc300->Object::Object { 0x555c09ebc2e0, 0x7ffe8a6d0b70 } +0x7ffe8a6d0b70->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b70 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0x7ffe8a6d0ab7->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300 } +0x7ffe8a6d0b50->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0ab7, 0x7ffe8a6d0b50 } +Next token is token 'a' (0x7ffe8a6d0b50 'a') +0x7ffe8a6d0a90->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0a90, 0x7ffe8a6d0b50 } +Shifting token 'a' (0x7ffe8a6d0a90 'a') +0x555c09ebc320->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0a90 } +0x7ffe8a6d0a90->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0a90 } +Entering state 2 +Stack now 0 11 11 2 +0x7ffe8a6d0b70->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x555c09ebc320 'a') +-> $$ = nterm item (0x7ffe8a6d0b70 'a') +0x555c09ebc320->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b70 } +0x555c09ebc320->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b70 } +0x7ffe8a6d0b70->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b70 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x7ffe8a6d0ab7->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 
0x555c09ebc320 } +0x7ffe8a6d0b50->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0ab7, 0x7ffe8a6d0b50 } +Next token is token 'a' (0x7ffe8a6d0b50 'a') +0x7ffe8a6d0a90->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0a90, 0x7ffe8a6d0b50 } +Shifting token 'a' (0x7ffe8a6d0a90 'a') +0x555c09ebc340->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0a90 } +0x7ffe8a6d0a90->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0a90 } +Entering state 2 +Stack now 0 11 11 11 2 +0x7ffe8a6d0b70->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x555c09ebc340 'a') +-> $$ = nterm item (0x7ffe8a6d0b70 'a') +0x555c09ebc340->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0b70 } +0x555c09ebc340->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b70 } +0x7ffe8a6d0b70->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0b70 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x7ffe8a6d0ab7->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340 } +0x7ffe8a6d0b50->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0ab7, 0x7ffe8a6d0b50 } +Next token is token 'p' (0x7ffe8a6d0b50 'p'Exception caught: cleaning lookahead and stack +0x555c09ebc340->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0b50 } +0x555c09ebc320->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b50 } +0x555c09ebc300->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b50 } +0x555c09ebc2e0->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x7ffe8a6d0b50 } +exception caught: printer +end { } +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffe8a6d0ab7->Object::Object { } +0x7ffe8a6d0b50->Object::Object { 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x7ffe8a6d0ab7, 0x7ffe8a6d0b50 } +Next token is token 'a' (0x7ffe8a6d0b50 'a') +0x7ffe8a6d0a90->Object::Object { 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x7ffe8a6d0a90, 0x7ffe8a6d0b50 } +Shifting token 'a' (0x7ffe8a6d0a90 'a') +0x555c09ebc2e0->Object::Object { 0x7ffe8a6d0a90 } +0x7ffe8a6d0a90->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0a90 } +Entering state 2 +Stack now 0 2 +0x7ffe8a6d0b70->Object::Object { 0x555c09ebc2e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x555c09ebc2e0 'a') +-> $$ = nterm item (0x7ffe8a6d0b70 'a') +0x555c09ebc2e0->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0b70 } +0x555c09ebc2e0->Object::Object { 0x7ffe8a6d0b70 } +0x7ffe8a6d0b70->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0b70 } +Entering state 11 +Stack now 0 11 +Reading a token +0x7ffe8a6d0ab7->Object::Object { 0x555c09ebc2e0 } +0x7ffe8a6d0b50->Object::Object { 0x555c09ebc2e0, 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0ab7, 
0x7ffe8a6d0b50 } +Next token is token 'a' (0x7ffe8a6d0b50 'a') +0x7ffe8a6d0a90->Object::Object { 0x555c09ebc2e0, 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0a90, 0x7ffe8a6d0b50 } +Shifting token 'a' (0x7ffe8a6d0a90 'a') +0x555c09ebc300->Object::Object { 0x555c09ebc2e0, 0x7ffe8a6d0a90 } +0x7ffe8a6d0a90->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0a90 } +Entering state 2 +Stack now 0 11 2 +0x7ffe8a6d0b70->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x555c09ebc300 'a') +-> $$ = nterm item (0x7ffe8a6d0b70 'a') +0x555c09ebc300->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b70 } +0x555c09ebc300->Object::Object { 0x555c09ebc2e0, 0x7ffe8a6d0b70 } +0x7ffe8a6d0b70->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b70 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0x7ffe8a6d0ab7->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300 } +0x7ffe8a6d0b50->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0ab7, 0x7ffe8a6d0b50 } +Next token is token 'a' (0x7ffe8a6d0b50 'a') +0x7ffe8a6d0a90->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0a90, 0x7ffe8a6d0b50 } +Shifting token 'a' (0x7ffe8a6d0a90 'a') +0x555c09ebc320->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0a90 } +0x7ffe8a6d0a90->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0a90 } +Entering state 2 +Stack now 0 11 11 2 +0x7ffe8a6d0b70->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x555c09ebc320 'a') +-> $$ = nterm item (0x7ffe8a6d0b70 'a') +0x555c09ebc320->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b70 } +0x555c09ebc320->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b70 } +0x7ffe8a6d0b70->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b70 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x7ffe8a6d0ab7->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320 } +0x7ffe8a6d0b50->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0ab7, 0x7ffe8a6d0b50 } +Next token is token 'a' (0x7ffe8a6d0b50 'a') +0x7ffe8a6d0a90->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0a90, 0x7ffe8a6d0b50 } +Shifting token 'a' (0x7ffe8a6d0a90 'a') +0x555c09ebc340->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0a90 } +0x7ffe8a6d0a90->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0a90 } +Entering state 2 +Stack now 0 11 11 11 2 +0x7ffe8a6d0b70->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x555c09ebc340 'a') +-> $$ = nterm item (0x7ffe8a6d0b70 'a') +0x555c09ebc340->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0b70 } +0x555c09ebc340->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b70 } +0x7ffe8a6d0b70->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 
0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0b70 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x7ffe8a6d0ab7->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340 } +0x7ffe8a6d0b50->Object::Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0ab7 } +0x7ffe8a6d0ab7->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0ab7, 0x7ffe8a6d0b50 } +Next token is token 'p' (0x7ffe8a6d0b50 'p'Exception caught: cleaning lookahead and stack +0x555c09ebc340->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x555c09ebc340, 0x7ffe8a6d0b50 } +0x555c09ebc320->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x555c09ebc320, 0x7ffe8a6d0b50 } +0x555c09ebc300->Object::~Object { 0x555c09ebc2e0, 0x555c09ebc300, 0x7ffe8a6d0b50 } +0x555c09ebc2e0->Object::~Object { 0x555c09ebc2e0, 0x7ffe8a6d0b50 } +0x7ffe8a6d0b50->Object::~Object { 0x7ffe8a6d0b50 } +exception caught: printer +end { } +./c++.at:1362: grep '^exception caught: printer$' stderr +stderr: +stdout: +stdout: +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:574: $here/modern stdout: +stderr: Modern C++: 201402 -./c++.at:568: $PREPARSER ./list +./c++.at:574: $PREPARSER ./list +stdout: +./c++.at:856: $PREPARSER ./input stderr: stderr: Destroy: "" @@ -266874,14 +267362,51 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:569: $here/modern +stdout: +Modern C++: 201402 +./c++.at:569: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: stdout: ./c++.at:1363: $PREPARSER ./input aaaas -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: reduction ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:1363: $PREPARSER ./input aaaal stderr: exception caught: yylex @@ -266893,219 +267418,223 @@ ./c++.at:1363: $PREPARSER ./input aaaap stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: ./c++.at:1363: $PREPARSER ./input --debug aaaap -stdout: -./c++.at:567: $here/modern -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff9254b1bf->Object::Object { } -0x7fff9254b2a0->Object::Object { 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'a' (0x7fff9254b2a0 'a') -0x7fff9254b1e0->Object::Object { 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x7fff9254b1e0, 0x7fff9254b2a0 } -Shifting token 'a' (0x7fff9254b1e0 'a') -0x560624eb82e0->Object::Object { 0x7fff9254b1e0 } -0x7fff9254b1e0->Object::~Object { 0x560624eb82e0, 0x7fff9254b1e0 } +0x7fffc32d0dff->Object::Object { } +0x7fffc32d0ee0->Object::Object { 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'a' (0x7fffc32d0ee0 'a') +0x7fffc32d0e20->Object::Object { 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x7fffc32d0e20, 0x7fffc32d0ee0 } +Shifting token 'a' (0x7fffc32d0e20 'a') +0x562161dc02e0->Object::Object { 0x7fffc32d0e20 } +0x7fffc32d0e20->Object::~Object { 0x562161dc02e0, 0x7fffc32d0e20 } Entering state 1 Stack now 0 1 -0x7fff9254b2c0->Object::Object { 0x560624eb82e0 } +0x7fffc32d0f00->Object::Object { 0x562161dc02e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560624eb82e0 'a') --> $$ = nterm item (0x7fff9254b2c0 'a') -0x560624eb82e0->Object::~Object { 0x560624eb82e0, 0x7fff9254b2c0 } -0x560624eb82e0->Object::Object { 0x7fff9254b2c0 } -0x7fff9254b2c0->Object::~Object { 0x560624eb82e0, 0x7fff9254b2c0 } + $1 = token 'a' (0x562161dc02e0 'a') +-> $$ = nterm item (0x7fffc32d0f00 'a') +0x562161dc02e0->Object::~Object { 0x562161dc02e0, 0x7fffc32d0f00 } +0x562161dc02e0->Object::Object { 0x7fffc32d0f00 } +0x7fffc32d0f00->Object::~Object { 0x562161dc02e0, 0x7fffc32d0f00 } Entering state 10 Stack now 0 10 Reading a token -0x7fff9254b1bf->Object::Object { 0x560624eb82e0 } -0x7fff9254b2a0->Object::Object { 0x560624eb82e0, 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x560624eb82e0, 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'a' (0x7fff9254b2a0 'a') -0x7fff9254b1e0->Object::Object { 0x560624eb82e0, 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x560624eb82e0, 0x7fff9254b1e0, 0x7fff9254b2a0 } -Shifting token 'a' (0x7fff9254b1e0 'a') -0x560624eb8300->Object::Object { 0x560624eb82e0, 0x7fff9254b1e0 } -0x7fff9254b1e0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1e0 } +0x7fffc32d0dff->Object::Object { 0x562161dc02e0 } +0x7fffc32d0ee0->Object::Object { 0x562161dc02e0, 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 
0x562161dc02e0, 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'a' (0x7fffc32d0ee0 'a') +0x7fffc32d0e20->Object::Object { 0x562161dc02e0, 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x562161dc02e0, 0x7fffc32d0e20, 0x7fffc32d0ee0 } +Shifting token 'a' (0x7fffc32d0e20 'a') +0x562161dc0300->Object::Object { 0x562161dc02e0, 0x7fffc32d0e20 } +0x7fffc32d0e20->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0e20 } Entering state 1 Stack now 0 10 1 -0x7fff9254b2c0->Object::Object { 0x560624eb82e0, 0x560624eb8300 } +0x7fffc32d0f00->Object::Object { 0x562161dc02e0, 0x562161dc0300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560624eb8300 'a') --> $$ = nterm item (0x7fff9254b2c0 'a') -0x560624eb8300->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2c0 } -0x560624eb8300->Object::Object { 0x560624eb82e0, 0x7fff9254b2c0 } -0x7fff9254b2c0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2c0 } + $1 = token 'a' (0x562161dc0300 'a') +-> $$ = nterm item (0x7fffc32d0f00 'a') +0x562161dc0300->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0f00 } +0x562161dc0300->Object::Object { 0x562161dc02e0, 0x7fffc32d0f00 } +0x7fffc32d0f00->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0f00 } Entering state 10 Stack now 0 10 10 Reading a token -0x7fff9254b1bf->Object::Object { 0x560624eb82e0, 0x560624eb8300 } -0x7fff9254b2a0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'a' (0x7fff9254b2a0 'a') -0x7fff9254b1e0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1e0, 0x7fff9254b2a0 } -Shifting token 'a' (0x7fff9254b1e0 'a') -0x560624eb8320->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1e0 } -0x7fff9254b1e0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1e0 } +0x7fffc32d0dff->Object::Object { 0x562161dc02e0, 0x562161dc0300 } +0x7fffc32d0ee0->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'a' (0x7fffc32d0ee0 'a') +0x7fffc32d0e20->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0e20, 0x7fffc32d0ee0 } +Shifting token 'a' (0x7fffc32d0e20 'a') +0x562161dc0320->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0e20 } +0x7fffc32d0e20->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0e20 } Entering state 1 Stack now 0 10 10 1 -0x7fff9254b2c0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320 } +0x7fffc32d0f00->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560624eb8320 'a') --> $$ = nterm item (0x7fff9254b2c0 'a') -0x560624eb8320->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2c0 } -0x560624eb8320->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2c0 } -0x7fff9254b2c0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2c0 } + $1 = token 'a' (0x562161dc0320 'a') +-> $$ = nterm item (0x7fffc32d0f00 'a') +0x562161dc0320->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0f00 } +0x562161dc0320->Object::Object { 
0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0f00 } +0x7fffc32d0f00->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0f00 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff9254b1bf->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320 } -0x7fff9254b2a0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'a' (0x7fff9254b2a0 'a') -0x7fff9254b1e0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1e0, 0x7fff9254b2a0 } -Shifting token 'a' (0x7fff9254b1e0 'a') -0x560624eb8340->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1e0 } -0x7fff9254b1e0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b1e0 } +0x7fffc32d0dff->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320 } +0x7fffc32d0ee0->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'a' (0x7fffc32d0ee0 'a') +0x7fffc32d0e20->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0e20, 0x7fffc32d0ee0 } +Shifting token 'a' (0x7fffc32d0e20 'a') +0x562161dc0340->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0e20 } +0x7fffc32d0e20->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0e20 } Entering state 1 Stack now 0 10 10 10 1 -0x7fff9254b2c0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340 } +0x7fffc32d0f00->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560624eb8340 'a') --> $$ = nterm item (0x7fff9254b2c0 'a') -0x560624eb8340->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b2c0 } -0x560624eb8340->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2c0 } -0x7fff9254b2c0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b2c0 } + $1 = token 'a' (0x562161dc0340 'a') +-> $$ = nterm item (0x7fffc32d0f00 'a') +0x562161dc0340->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0f00 } +0x562161dc0340->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0f00 } +0x7fffc32d0f00->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0f00 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff9254b1bf->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340 } -0x7fff9254b2a0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'p' (0x7fff9254b2a0 'p'Exception caught: cleaning lookahead and stack -0x560624eb8340->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b2a0 } 
-0x560624eb8320->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2a0 } -0x560624eb8300->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2a0 } -0x560624eb82e0->Object::~Object { 0x560624eb82e0, 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x7fff9254b2a0 } +0x7fffc32d0dff->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340 } +0x7fffc32d0ee0->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'p' (0x7fffc32d0ee0 'p'Exception caught: cleaning lookahead and stack +0x562161dc0340->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0ee0 } +0x562161dc0320->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0ee0 } +0x562161dc0300->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0ee0 } +0x562161dc02e0->Object::~Object { 0x562161dc02e0, 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x7fffc32d0ee0 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stdout: -Modern C++: 201402 -./c++.at:567: $PREPARSER ./list stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff9254b1bf->Object::Object { } -0x7fff9254b2a0->Object::Object { 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'a' (0x7fff9254b2a0 'a') -0x7fff9254b1e0->Object::Object { 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x7fff9254b1e0, 0x7fff9254b2a0 } -Shifting token 'a' (0x7fff9254b1e0 'a') -0x560624eb82e0->Object::Object { 0x7fff9254b1e0 } -0x7fff9254b1e0->Object::~Object { 0x560624eb82e0, 0x7fff9254b1e0 } +0x7fffc32d0dff->Object::Object { } +0x7fffc32d0ee0->Object::Object { 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'a' (0x7fffc32d0ee0 'a') +0x7fffc32d0e20->Object::Object { 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x7fffc32d0e20, 0x7fffc32d0ee0 } +Shifting token 'a' (0x7fffc32d0e20 'a') +0x562161dc02e0->Object::Object { 0x7fffc32d0e20 } +0x7fffc32d0e20->Object::~Object { 0x562161dc02e0, 0x7fffc32d0e20 } Entering state 1 Stack now 0 1 -0x7fff9254b2c0->Object::Object { 0x560624eb82e0 } +0x7fffc32d0f00->Object::Object { 0x562161dc02e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560624eb82e0 'a') --> $$ = nterm item (0x7fff9254b2c0 'a') -0x560624eb82e0->Object::~Object { 0x560624eb82e0, 0x7fff9254b2c0 } -0x560624eb82e0->Object::Object { 0x7fff9254b2c0 } -0x7fff9254b2c0->Object::~Object { 0x560624eb82e0, 0x7fff9254b2c0 } + $1 = token 'a' (0x562161dc02e0 'a') +-> $$ = nterm item (0x7fffc32d0f00 'a') +0x562161dc02e0->Object::~Object { 0x562161dc02e0, 0x7fffc32d0f00 } +0x562161dc02e0->Object::Object { 0x7fffc32d0f00 } +0x7fffc32d0f00->Object::~Object { 0x562161dc02e0, 0x7fffc32d0f00 } Entering state 10 Stack now 0 10 Reading a token -0x7fff9254b1bf->Object::Object { 0x560624eb82e0 } -0x7fff9254b2a0->Object::Object { 0x560624eb82e0, 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x560624eb82e0, 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'a' (0x7fff9254b2a0 'a') -0x7fff9254b1e0->Object::Object { 0x560624eb82e0, 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x560624eb82e0, 
0x7fff9254b1e0, 0x7fff9254b2a0 } -Shifting token 'a' (0x7fff9254b1e0 'a') -0x560624eb8300->Object::Object { 0x560624eb82e0, 0x7fff9254b1e0 } -0x7fff9254b1e0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1e0 } +0x7fffc32d0dff->Object::Object { 0x562161dc02e0 } +0x7fffc32d0ee0->Object::Object { 0x562161dc02e0, 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 0x562161dc02e0, 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'a' (0x7fffc32d0ee0 'a') +0x7fffc32d0e20->Object::Object { 0x562161dc02e0, 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x562161dc02e0, 0x7fffc32d0e20, 0x7fffc32d0ee0 } +Shifting token 'a' (0x7fffc32d0e20 'a') +0x562161dc0300->Object::Object { 0x562161dc02e0, 0x7fffc32d0e20 } +0x7fffc32d0e20->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0e20 } Entering state 1 Stack now 0 10 1 -0x7fff9254b2c0->Object::Object { 0x560624eb82e0, 0x560624eb8300 } +0x7fffc32d0f00->Object::Object { 0x562161dc02e0, 0x562161dc0300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560624eb8300 'a') --> $$ = nterm item (0x7fff9254b2c0 'a') -0x560624eb8300->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2c0 } -0x560624eb8300->Object::Object { 0x560624eb82e0, 0x7fff9254b2c0 } -0x7fff9254b2c0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2c0 } + $1 = token 'a' (0x562161dc0300 'a') +-> $$ = nterm item (0x7fffc32d0f00 'a') +0x562161dc0300->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0f00 } +0x562161dc0300->Object::Object { 0x562161dc02e0, 0x7fffc32d0f00 } +0x7fffc32d0f00->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0f00 } Entering state 10 Stack now 0 10 10 Reading a token -0x7fff9254b1bf->Object::Object { 0x560624eb82e0, 0x560624eb8300 } -0x7fff9254b2a0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'a' (0x7fff9254b2a0 'a') -0x7fff9254b1e0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1e0, 0x7fff9254b2a0 } -Shifting token 'a' (0x7fff9254b1e0 'a') -0x560624eb8320->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b1e0 } -0x7fff9254b1e0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1e0 } +0x7fffc32d0dff->Object::Object { 0x562161dc02e0, 0x562161dc0300 } +0x7fffc32d0ee0->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'a' (0x7fffc32d0ee0 'a') +0x7fffc32d0e20->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0e20, 0x7fffc32d0ee0 } +Shifting token 'a' (0x7fffc32d0e20 'a') +0x562161dc0320->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0e20 } +0x7fffc32d0e20->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0e20 } Entering state 1 Stack now 0 10 10 1 -0x7fff9254b2c0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320 } +0x7fffc32d0f00->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560624eb8320 'a') --> $$ = nterm item (0x7fff9254b2c0 'a') -0x560624eb8320->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2c0 } 
-0x560624eb8320->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2c0 } -0x7fff9254b2c0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2c0 } + $1 = token 'a' (0x562161dc0320 'a') +-> $$ = nterm item (0x7fffc32d0f00 'a') +0x562161dc0320->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0f00 } +0x562161dc0320->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0f00 } +0x7fffc32d0f00->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0f00 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff9254b1bf->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320 } -0x7fff9254b2a0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'a' (0x7fff9254b2a0 'a') -0x7fff9254b1e0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1e0, 0x7fff9254b2a0 } -Shifting token 'a' (0x7fff9254b1e0 'a') -0x560624eb8340->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b1e0 } -0x7fff9254b1e0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b1e0 } +0x7fffc32d0dff->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320 } +0x7fffc32d0ee0->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'a' (0x7fffc32d0ee0 'a') +0x7fffc32d0e20->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0e20, 0x7fffc32d0ee0 } +Shifting token 'a' (0x7fffc32d0e20 'a') +0x562161dc0340->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0e20 } +0x7fffc32d0e20->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0e20 } Entering state 1 Stack now 0 10 10 10 1 -0x7fff9254b2c0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340 } +0x7fffc32d0f00->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x560624eb8340 'a') --> $$ = nterm item (0x7fff9254b2c0 'a') -0x560624eb8340->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b2c0 } -0x560624eb8340->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2c0 } -0x7fff9254b2c0->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b2c0 } + $1 = token 'a' (0x562161dc0340 'a') +-> $$ = nterm item (0x7fffc32d0f00 'a') +0x562161dc0340->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0f00 } +0x562161dc0340->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0f00 } +0x7fffc32d0f00->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0f00 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff9254b1bf->Object::Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340 } -0x7fff9254b2a0->Object::Object { 0x560624eb82e0, 0x560624eb8300, 
0x560624eb8320, 0x560624eb8340, 0x7fff9254b1bf } -0x7fff9254b1bf->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b1bf, 0x7fff9254b2a0 } -Next token is token 'p' (0x7fff9254b2a0 'p'Exception caught: cleaning lookahead and stack -0x560624eb8340->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x560624eb8340, 0x7fff9254b2a0 } -0x560624eb8320->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x560624eb8320, 0x7fff9254b2a0 } -0x560624eb8300->Object::~Object { 0x560624eb82e0, 0x560624eb8300, 0x7fff9254b2a0 } -0x560624eb82e0->Object::~Object { 0x560624eb82e0, 0x7fff9254b2a0 } -0x7fff9254b2a0->Object::~Object { 0x7fff9254b2a0 } +0x7fffc32d0dff->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340 } +0x7fffc32d0ee0->Object::Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0dff } +0x7fffc32d0dff->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0dff, 0x7fffc32d0ee0 } +Next token is token 'p' (0x7fffc32d0ee0 'p'Exception caught: cleaning lookahead and stack +0x562161dc0340->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x562161dc0340, 0x7fffc32d0ee0 } +0x562161dc0320->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x562161dc0320, 0x7fffc32d0ee0 } +0x562161dc0300->Object::~Object { 0x562161dc02e0, 0x562161dc0300, 0x7fffc32d0ee0 } +0x562161dc02e0->Object::~Object { 0x562161dc02e0, 0x7fffc32d0ee0 } +0x7fffc32d0ee0->Object::~Object { 0x7fffc32d0ee0 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr +stdout: +stderr: +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae +stdout: +./c++.at:572: $here/modern +stderr: +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +Modern C++: 201402 +./c++.at:572: $PREPARSER ./list +./c++.at:1363: $PREPARSER ./input aaaaE +stderr: stderr: Destroy: "0" Destroy: "0" @@ -267128,88 +267657,308 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: syntax error +exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT ======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:1363: $PREPARSER ./input aaaaE -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in -./c++.at:1363: $PREPARSER ./input aaaaT +./c++.at:1361: $PREPARSER ./input aaaas +./c++.at:1363: $PREPARSER ./input aaaaR stderr: +stderr: +exception caught: reduction ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch 
for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1361: $PREPARSER ./input aaaal stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1363: $PREPARSER ./input aaaaR -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input i stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:573: $here/modern +./c++.at:1361: $PREPARSER ./input aaaap +stdout: +stderr: +Modern C++: 201402 +stderr: +./c++.at:573: $PREPARSER ./list +stdout: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $here/modern +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input --debug aaaap +Modern C++: 201402 +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: $PREPARSER ./list +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5626bb941f40->Object::Object { } +Next token is token 'a' (0x5626bb941f40 'a') +Shifting token 'a' (0x5626bb941f40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626bb941f40 'a') +-> $$ = nterm item (0x5626bb941f40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x5626bb941f90->Object::Object { 0x5626bb941f40 } +Next token is token 'a' (0x5626bb941f90 'a') +Shifting token 'a' (0x5626bb941f90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626bb941f90 'a') +-> $$ = nterm item (0x5626bb941f90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x5626bb941fe0->Object::Object { 0x5626bb941f40, 0x5626bb941f90 } +Next token is token 'a' (0x5626bb941fe0 'a') +Shifting token 'a' (0x5626bb941fe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626bb941fe0 'a') +-> $$ = nterm item (0x5626bb941fe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x5626bb942030->Object::Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0 } +Next token is token 'a' (0x5626bb942030 'a') +Shifting token 'a' (0x5626bb942030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626bb942030 'a') +-> $$ = nterm item (0x5626bb942030 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x5626bb942080->Object::Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0, 0x5626bb942030 } +Next token is token 'p' (0x5626bb942080 'p'Exception caught: cleaning lookahead and stack +0x5626bb942080->Object::~Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0, 0x5626bb942030, 0x5626bb942080 } +0x5626bb942030->Object::~Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0, 0x5626bb942030 } 
+0x5626bb941fe0->Object::~Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0 } +0x5626bb941f90->Object::~Object { 0x5626bb941f40, 0x5626bb941f90 } +0x5626bb941f40->Object::~Object { 0x5626bb941f40 } +exception caught: printer +end { } +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5626bb941f40->Object::Object { } +Next token is token 'a' (0x5626bb941f40 'a') +Shifting token 'a' (0x5626bb941f40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626bb941f40 'a') +-> $$ = nterm item (0x5626bb941f40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x5626bb941f90->Object::Object { 0x5626bb941f40 } +Next token is token 'a' (0x5626bb941f90 'a') +Shifting token 'a' (0x5626bb941f90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626bb941f90 'a') +-> $$ = nterm item (0x5626bb941f90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x5626bb941fe0->Object::Object { 0x5626bb941f40, 0x5626bb941f90 } +Next token is token 'a' (0x5626bb941fe0 'a') +Shifting token 'a' (0x5626bb941fe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626bb941fe0 'a') +-> $$ = nterm item (0x5626bb941fe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x5626bb942030->Object::Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0 } +Next token is token 'a' (0x5626bb942030 'a') +Shifting token 'a' (0x5626bb942030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5626bb942030 'a') +-> $$ = nterm item (0x5626bb942030 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x5626bb942080->Object::Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0, 0x5626bb942030 } +Next token is token 'p' (0x5626bb942080 'p'Exception caught: cleaning lookahead and stack +0x5626bb942080->Object::~Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0, 0x5626bb942030, 0x5626bb942080 } +0x5626bb942030->Object::~Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0, 0x5626bb942030 } +0x5626bb941fe0->Object::~Object { 0x5626bb941f40, 0x5626bb941f90, 0x5626bb941fe0 } +0x5626bb941f90->Object::~Object { 0x5626bb941f40, 0x5626bb941f90 } +0x5626bb941f40->Object::~Object { 0x5626bb941f40 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr ======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stdout: +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae stderr: -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 
-error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:1360: $PREPARSER ./input aaaas stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaal stderr: -stdout: stderr: -./glr-regression.at:1965: $PREPARSER ./glr-regr17 exception caught: yylex +stdout: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -Ambiguity detected. -Option 1, - start -> - ambig1 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty1 -> - -Option 2, - start -> - ambig2 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty2 -> - -1.1-2.2: syntax is ambiguous -./glr-regression.at:1965: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:659: $PREPARSER ./input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input i stderr: -766. glr-regression.at:1965: ok exception caught: initial-action ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +675. 
c++.at:584: stdout: + ok +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaap stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -267219,57 +267968,57 @@ Entering state 0 Stack now 0 Reading a token -0x55f25c8e8f40->Object::Object { } -Next token is token 'a' (0x55f25c8e8f40 'a') -Shifting token 'a' (0x55f25c8e8f40 'a') +0x55ae00af9f40->Object::Object { } +Next token is token 'a' (0x55ae00af9f40 'a') +Shifting token 'a' (0x55ae00af9f40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f25c8e8f40 'a') --> $$ = nterm item (0x55f25c8e8f40 'a') + $1 = token 'a' (0x55ae00af9f40 'a') +-> $$ = nterm item (0x55ae00af9f40 'a') Entering state 11 Stack now 0 11 Reading a token -0x55f25c8e8f90->Object::Object { 0x55f25c8e8f40 } -Next token is token 'a' (0x55f25c8e8f90 'a') -Shifting token 'a' (0x55f25c8e8f90 'a') +0x55ae00af9f90->Object::Object { 0x55ae00af9f40 } +Next token is token 'a' (0x55ae00af9f90 'a') +Shifting token 'a' (0x55ae00af9f90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f25c8e8f90 'a') --> $$ = nterm item (0x55f25c8e8f90 'a') + $1 = token 'a' (0x55ae00af9f90 'a') +-> $$ = nterm item (0x55ae00af9f90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x55f25c8e8fe0->Object::Object { 0x55f25c8e8f40, 0x55f25c8e8f90 } -Next token is token 'a' (0x55f25c8e8fe0 'a') -Shifting token 'a' (0x55f25c8e8fe0 'a') +0x55ae00af9fe0->Object::Object { 0x55ae00af9f40, 0x55ae00af9f90 } +Next token is token 'a' (0x55ae00af9fe0 'a') +Shifting token 'a' (0x55ae00af9fe0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f25c8e8fe0 'a') --> $$ = nterm item (0x55f25c8e8fe0 'a') + $1 = token 'a' (0x55ae00af9fe0 'a') +-> $$ = nterm item (0x55ae00af9fe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x55f25c8e9030->Object::Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0 } -Next token is token 'a' (0x55f25c8e9030 'a') -Shifting token 'a' (0x55f25c8e9030 'a') +0x55ae00afa030->Object::Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0 } +Next token is token 'a' (0x55ae00afa030 'a') +Shifting token 'a' (0x55ae00afa030 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f25c8e9030 'a') --> $$ = nterm item (0x55f25c8e9030 'a') + $1 = token 'a' (0x55ae00afa030 'a') +-> $$ = nterm item (0x55ae00afa030 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x55f25c8e9080->Object::Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0, 0x55f25c8e9030 } -Next token is token 'p' (0x55f25c8e9080 'p'Exception caught: cleaning lookahead and stack -0x55f25c8e9080->Object::~Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0, 0x55f25c8e9030, 0x55f25c8e9080 } -0x55f25c8e9030->Object::~Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0, 0x55f25c8e9030 } -0x55f25c8e8fe0->Object::~Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0 } -0x55f25c8e8f90->Object::~Object { 0x55f25c8e8f40, 0x55f25c8e8f90 } -0x55f25c8e8f40->Object::~Object { 0x55f25c8e8f40 } +0x55ae00afa080->Object::Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0, 0x55ae00afa030 } +Next token is token 'p' (0x55ae00afa080 'p'Exception caught: cleaning lookahead and stack +0x55ae00afa080->Object::~Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0, 0x55ae00afa030, 0x55ae00afa080 } 
+0x55ae00afa030->Object::~Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0, 0x55ae00afa030 } +0x55ae00af9fe0->Object::~Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0 } +0x55ae00af9f90->Object::~Object { 0x55ae00af9f40, 0x55ae00af9f90 } +0x55ae00af9f40->Object::~Object { 0x55ae00af9f40 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -267278,57 +268027,57 @@ Entering state 0 Stack now 0 Reading a token -0x55f25c8e8f40->Object::Object { } -Next token is token 'a' (0x55f25c8e8f40 'a') -Shifting token 'a' (0x55f25c8e8f40 'a') +0x55ae00af9f40->Object::Object { } +Next token is token 'a' (0x55ae00af9f40 'a') +Shifting token 'a' (0x55ae00af9f40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f25c8e8f40 'a') --> $$ = nterm item (0x55f25c8e8f40 'a') + $1 = token 'a' (0x55ae00af9f40 'a') +-> $$ = nterm item (0x55ae00af9f40 'a') Entering state 11 Stack now 0 11 Reading a token -0x55f25c8e8f90->Object::Object { 0x55f25c8e8f40 } -Next token is token 'a' (0x55f25c8e8f90 'a') -Shifting token 'a' (0x55f25c8e8f90 'a') +0x55ae00af9f90->Object::Object { 0x55ae00af9f40 } +Next token is token 'a' (0x55ae00af9f90 'a') +Shifting token 'a' (0x55ae00af9f90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f25c8e8f90 'a') --> $$ = nterm item (0x55f25c8e8f90 'a') + $1 = token 'a' (0x55ae00af9f90 'a') +-> $$ = nterm item (0x55ae00af9f90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x55f25c8e8fe0->Object::Object { 0x55f25c8e8f40, 0x55f25c8e8f90 } -Next token is token 'a' (0x55f25c8e8fe0 'a') -Shifting token 'a' (0x55f25c8e8fe0 'a') +0x55ae00af9fe0->Object::Object { 0x55ae00af9f40, 0x55ae00af9f90 } +Next token is token 'a' (0x55ae00af9fe0 'a') +Shifting token 'a' (0x55ae00af9fe0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f25c8e8fe0 'a') --> $$ = nterm item (0x55f25c8e8fe0 'a') + $1 = token 'a' (0x55ae00af9fe0 'a') +-> $$ = nterm item (0x55ae00af9fe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x55f25c8e9030->Object::Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0 } -Next token is token 'a' (0x55f25c8e9030 'a') -Shifting token 'a' (0x55f25c8e9030 'a') +0x55ae00afa030->Object::Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0 } +Next token is token 'a' (0x55ae00afa030 'a') +Shifting token 'a' (0x55ae00afa030 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f25c8e9030 'a') --> $$ = nterm item (0x55f25c8e9030 'a') + $1 = token 'a' (0x55ae00afa030 'a') +-> $$ = nterm item (0x55ae00afa030 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x55f25c8e9080->Object::Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0, 0x55f25c8e9030 } -Next token is token 'p' (0x55f25c8e9080 'p'Exception caught: cleaning lookahead and stack -0x55f25c8e9080->Object::~Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0, 0x55f25c8e9030, 0x55f25c8e9080 } -0x55f25c8e9030->Object::~Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0, 0x55f25c8e9030 } -0x55f25c8e8fe0->Object::~Object { 0x55f25c8e8f40, 0x55f25c8e8f90, 0x55f25c8e8fe0 } -0x55f25c8e8f90->Object::~Object { 0x55f25c8e8f40, 0x55f25c8e8f90 } -0x55f25c8e8f40->Object::~Object { 0x55f25c8e8f40 } +0x55ae00afa080->Object::Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0, 0x55ae00afa030 } +Next token is token 'p' (0x55ae00afa080 
'p'Exception caught: cleaning lookahead and stack +0x55ae00afa080->Object::~Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0, 0x55ae00afa030, 0x55ae00afa080 } +0x55ae00afa030->Object::~Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0, 0x55ae00afa030 } +0x55ae00af9fe0->Object::~Object { 0x55ae00af9f40, 0x55ae00af9f90, 0x55ae00af9fe0 } +0x55ae00af9f90->Object::~Object { 0x55ae00af9f40, 0x55ae00af9f90 } +0x55ae00af9f40->Object::~Object { 0x55ae00af9f40 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr @@ -267341,193 +268090,51 @@ ./c++.at:1360: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1360: $PREPARSER ./input aaaaT -stdout: -./c++.at:570: $here/modern stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -Modern C++: 201402 -./c++.at:570: $PREPARSER ./list -./c++.at:1360: $PREPARSER ./input aaaaR +./c++.at:850: $PREPARSER ./input +./c++.at:1360: $PREPARSER ./input aaaaT stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:571: $here/modern -stdout: -Modern C++: 201402 -./c++.at:571: $PREPARSER ./list +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1360: $PREPARSER ./input aaaaR stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -stdout: -./c++.at:1066: $PREPARSER ./input < in -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1065: $PREPARSER ./input < in stderr: error: invalid expression caught error error: invalid character caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -./c++.at:571: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in stderr: error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: -./glr-regression.at:2229: $PREPARSER ./input Nwin -stderr: -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Owin -stderr: -syntax error, unexpected 'n', expecting 'o' -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Owio -stderr: -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Nwio -stderr: -syntax error, unexpected 'o', expecting 'n' -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./c++.at:856: $PREPARSER ./input +./c++.at:1065: $PREPARSER ./input < in stderr: -stdout: -./glr-regression.at:1447: $PREPARSER ./glr-regr13 -stderr: -774. glr-regression.at:2229: ok stderr: ./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -755. glr-regression.at:1447: ok -stderr: -stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:2230: $PREPARSER ./input Nwin -stderr: -./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2230: $PREPARSER ./input Owin -stderr: -syntax error, unexpected 'n', expecting 'o' -./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2230: $PREPARSER ./input Owio -stderr: -./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2230: $PREPARSER ./input Nwio -stderr: -syntax error, unexpected 'o', expecting 'n' -./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -775. 
glr-regression.at:2230: ok +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:572: $here/modern -stdout: -Modern C++: 201402 -./c++.at:572: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:1064: $PREPARSER ./input < in @@ -267536,7 +268143,6 @@ caught error error: invalid character caught error -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1064: $PREPARSER ./input < in stderr: @@ -267546,25 +268152,43 @@ stderr: error: invalid character ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1361: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:1361: $PREPARSER ./input aaaal stderr: stdout: +./c++.at:568: $here/modern +stdout: +Modern C++: 201703 +./c++.at:568: $PREPARSER ./list +stderr: +stdout: +./c++.at:566: $here/modern stderr: -./c++.at:569: $here/modern -exception caught: yylex +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) stdout: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Modern C++: 201402 -./c++.at:569: $PREPARSER ./list -./c++.at:1361: $PREPARSER ./input i +Modern C++: 201703 +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -267587,89 +268211,25 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stderr: -exception caught: initial-action -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap -stderr: ======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1361: $PREPARSER ./input --debug aaaap +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export 
COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:1362: $PREPARSER ./input aaaas stdout: -./c++.at:573: $here/modern +./c++.at:235: $PREPARSER ./list stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55e6aa022f40->Object::Object { } -Next token is token 'a' (0x55e6aa022f40 'a') -Shifting token 'a' (0x55e6aa022f40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55e6aa022f40 'a') --> $$ = nterm item (0x55e6aa022f40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x55e6aa022f90->Object::Object { 0x55e6aa022f40 } -Next token is token 'a' (0x55e6aa022f90 'a') -Shifting token 'a' (0x55e6aa022f90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55e6aa022f90 'a') --> $$ = nterm item (0x55e6aa022f90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x55e6aa022fe0->Object::Object { 0x55e6aa022f40, 0x55e6aa022f90 } -Next token is token 'a' (0x55e6aa022fe0 'a') -Shifting token 'a' (0x55e6aa022fe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55e6aa022fe0 'a') --> $$ = nterm item (0x55e6aa022fe0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x55e6aa023030->Object::Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0 } -Next token is token 'a' (0x55e6aa023030 'a') -Shifting token 'a' (0x55e6aa023030 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55e6aa023030 'a') --> $$ = nterm item (0x55e6aa023030 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x55e6aa023080->Object::Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0, 0x55e6aa023030 } -Next token is token 'p' (0x55e6aa023080 'p'Exception caught: cleaning lookahead and stack -0x55e6aa023080->Object::~Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0, 0x55e6aa023030, 0x55e6aa023080 } -0x55e6aa023030->Object::~Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0, 0x55e6aa023030 } -0x55e6aa022fe0->Object::~Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0 } -0x55e6aa022f90->Object::~Object { 0x55e6aa022f40, 0x55e6aa022f90 } -0x55e6aa022f40->Object::~Object { 0x55e6aa022f40 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +664. 
c++.at:107: ok stderr: stdout: -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Modern C++: 201402 -./c++.at:573: $PREPARSER ./list -stderr: -./c++.at:1362: $PREPARSER ./input aaaal +./c++.at:567: $here/modern +stdout: +Modern C++: 201703 +./c++.at:567: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -267692,562 +268252,302 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55e6aa022f40->Object::Object { } -Next token is token 'a' (0x55e6aa022f40 'a') -Shifting token 'a' (0x55e6aa022f40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55e6aa022f40 'a') --> $$ = nterm item (0x55e6aa022f40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x55e6aa022f90->Object::Object { 0x55e6aa022f40 } -Next token is token 'a' (0x55e6aa022f90 'a') -Shifting token 'a' (0x55e6aa022f90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55e6aa022f90 'a') --> $$ = nterm item (0x55e6aa022f90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x55e6aa022fe0->Object::Object { 0x55e6aa022f40, 0x55e6aa022f90 } -Next token is token 'a' (0x55e6aa022fe0 'a') -Shifting token 'a' (0x55e6aa022fe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55e6aa022fe0 'a') --> $$ = nterm item (0x55e6aa022fe0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x55e6aa023030->Object::Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0 } -Next token is token 'a' (0x55e6aa023030 'a') -Shifting token 'a' (0x55e6aa023030 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55e6aa023030 'a') --> $$ = nterm item (0x55e6aa023030 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x55e6aa023080->Object::Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0, 0x55e6aa023030 } -Next token is token 'p' (0x55e6aa023080 'p'Exception caught: cleaning lookahead and stack -0x55e6aa023080->Object::~Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0, 0x55e6aa023030, 0x55e6aa023080 } -0x55e6aa023030->Object::~Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0, 0x55e6aa023030 } -0x55e6aa022fe0->Object::~Object { 0x55e6aa022f40, 0x55e6aa022f90, 0x55e6aa022fe0 } -0x55e6aa022f90->Object::~Object { 0x55e6aa022f40, 0x55e6aa022f90 } -0x55e6aa022f40->Object::~Object { 0x55e6aa022f40 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -exception caught: printer +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; 
export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -exception caught: syntax error +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +stderr: +stdout: +./c++.at:1362: $PREPARSER ./input aaaas +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input i -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: initial-action ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaap -./c++.at:1361: $PREPARSER ./input aaaaT stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input --debug aaaap -./c++.at:1361: $PREPARSER ./input aaaaR stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffc1747eb37->Object::Object { } -0x7ffc1747ebd0->Object::Object { 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'a' (0x7ffc1747ebd0 'a') -0x7ffc1747eb10->Object::Object { 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x7ffc1747eb10, 0x7ffc1747ebd0 } -Shifting token 'a' (0x7ffc1747eb10 'a') -0x556f334c52e0->Object::Object { 0x7ffc1747eb10 } -0x7ffc1747eb10->Object::~Object { 0x556f334c52e0, 0x7ffc1747eb10 } +0x7ffc96860d5f->Object::Object { } +0x7ffc96860e40->Object::Object { 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'a' (0x7ffc96860e40 'a') +0x7ffc96860d80->Object::Object { 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x7ffc96860d80, 0x7ffc96860e40 } +Shifting token 'a' (0x7ffc96860d80 'a') +0x55b6f21272e0->Object::Object { 0x7ffc96860d80 } +0x7ffc96860d80->Object::~Object { 0x55b6f21272e0, 0x7ffc96860d80 } Entering state 2 Stack now 0 2 -0x7ffc1747ebf0->Object::Object { 0x556f334c52e0 } +0x7ffc96860e60->Object::Object { 0x55b6f21272e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556f334c52e0 'a') --> $$ = nterm item (0x7ffc1747ebf0 'a') -0x556f334c52e0->Object::~Object { 0x556f334c52e0, 0x7ffc1747ebf0 } -0x556f334c52e0->Object::Object { 0x7ffc1747ebf0 } -0x7ffc1747ebf0->Object::~Object { 0x556f334c52e0, 0x7ffc1747ebf0 } + $1 = token 'a' (0x55b6f21272e0 'a') +-> $$ = nterm item (0x7ffc96860e60 'a') +0x55b6f21272e0->Object::~Object { 0x55b6f21272e0, 0x7ffc96860e60 } +0x55b6f21272e0->Object::Object { 0x7ffc96860e60 } +0x7ffc96860e60->Object::~Object { 0x55b6f21272e0, 0x7ffc96860e60 } Entering state 11 Stack now 0 11 Reading a token -0x7ffc1747eb37->Object::Object { 0x556f334c52e0 } -0x7ffc1747ebd0->Object::Object { 0x556f334c52e0, 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 
0x556f334c52e0, 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'a' (0x7ffc1747ebd0 'a') -0x7ffc1747eb10->Object::Object { 0x556f334c52e0, 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x556f334c52e0, 0x7ffc1747eb10, 0x7ffc1747ebd0 } -Shifting token 'a' (0x7ffc1747eb10 'a') -0x556f334c5300->Object::Object { 0x556f334c52e0, 0x7ffc1747eb10 } -0x7ffc1747eb10->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb10 } +0x7ffc96860d5f->Object::Object { 0x55b6f21272e0 } +0x7ffc96860e40->Object::Object { 0x55b6f21272e0, 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x55b6f21272e0, 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'a' (0x7ffc96860e40 'a') +0x7ffc96860d80->Object::Object { 0x55b6f21272e0, 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x55b6f21272e0, 0x7ffc96860d80, 0x7ffc96860e40 } +Shifting token 'a' (0x7ffc96860d80 'a') +0x55b6f2127300->Object::Object { 0x55b6f21272e0, 0x7ffc96860d80 } +0x7ffc96860d80->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d80 } Entering state 2 Stack now 0 11 2 -0x7ffc1747ebf0->Object::Object { 0x556f334c52e0, 0x556f334c5300 } +0x7ffc96860e60->Object::Object { 0x55b6f21272e0, 0x55b6f2127300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556f334c5300 'a') --> $$ = nterm item (0x7ffc1747ebf0 'a') -0x556f334c5300->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebf0 } -0x556f334c5300->Object::Object { 0x556f334c52e0, 0x7ffc1747ebf0 } -0x7ffc1747ebf0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebf0 } + $1 = token 'a' (0x55b6f2127300 'a') +-> $$ = nterm item (0x7ffc96860e60 'a') +0x55b6f2127300->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e60 } +0x55b6f2127300->Object::Object { 0x55b6f21272e0, 0x7ffc96860e60 } +0x7ffc96860e60->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e60 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffc1747eb37->Object::Object { 0x556f334c52e0, 0x556f334c5300 } -0x7ffc1747ebd0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'a' (0x7ffc1747ebd0 'a') -0x7ffc1747eb10->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb10, 0x7ffc1747ebd0 } -Shifting token 'a' (0x7ffc1747eb10 'a') -0x556f334c5320->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb10 } -0x7ffc1747eb10->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb10 } +0x7ffc96860d5f->Object::Object { 0x55b6f21272e0, 0x55b6f2127300 } +0x7ffc96860e40->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'a' (0x7ffc96860e40 'a') +0x7ffc96860d80->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d80, 0x7ffc96860e40 } +Shifting token 'a' (0x7ffc96860d80 'a') +0x55b6f2127320->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d80 } +0x7ffc96860d80->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d80 } Entering state 2 Stack now 0 11 11 2 -0x7ffc1747ebf0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320 } +0x7ffc96860e60->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320 } Reducing stack by rule 4 (line 
142): - $1 = token 'a' (0x556f334c5320 'a') --> $$ = nterm item (0x7ffc1747ebf0 'a') -0x556f334c5320->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebf0 } -0x556f334c5320->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebf0 } -0x7ffc1747ebf0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebf0 } + $1 = token 'a' (0x55b6f2127320 'a') +-> $$ = nterm item (0x7ffc96860e60 'a') +0x55b6f2127320->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e60 } +0x55b6f2127320->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e60 } +0x7ffc96860e60->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e60 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffc1747eb37->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320 } -0x7ffc1747ebd0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'a' (0x7ffc1747ebd0 'a') -0x7ffc1747eb10->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb10, 0x7ffc1747ebd0 } -Shifting token 'a' (0x7ffc1747eb10 'a') -0x556f334c5340->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb10 } -0x7ffc1747eb10->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747eb10 } +0x7ffc96860d5f->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320 } +0x7ffc96860e40->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'a' (0x7ffc96860e40 'a') +0x7ffc96860d80->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d80, 0x7ffc96860e40 } +Shifting token 'a' (0x7ffc96860d80 'a') +0x55b6f2127340->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d80 } +0x7ffc96860d80->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860d80 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffc1747ebf0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340 } +0x7ffc96860e60->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556f334c5340 'a') --> $$ = nterm item (0x7ffc1747ebf0 'a') -0x556f334c5340->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747ebf0 } -0x556f334c5340->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebf0 } -0x7ffc1747ebf0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747ebf0 } + $1 = token 'a' (0x55b6f2127340 'a') +-> $$ = nterm item (0x7ffc96860e60 'a') +0x55b6f2127340->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860e60 } +0x55b6f2127340->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e60 } +0x7ffc96860e60->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860e60 } Entering state 11 Stack now 0 11 11 11 11 
Reading a token -0x7ffc1747eb37->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340 } -0x7ffc1747ebd0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'p' (0x7ffc1747ebd0 'p'Exception caught: cleaning lookahead and stack -0x556f334c5340->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747ebd0 } -0x556f334c5320->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebd0 } -0x556f334c5300->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebd0 } -0x556f334c52e0->Object::~Object { 0x556f334c52e0, 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x7ffc1747ebd0 } +0x7ffc96860d5f->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340 } +0x7ffc96860e40->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'p' (0x7ffc96860e40 'p'Exception caught: cleaning lookahead and stack +0x55b6f2127340->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860e40 } +0x55b6f2127320->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e40 } +0x55b6f2127300->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e40 } +0x55b6f21272e0->Object::~Object { 0x55b6f21272e0, 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x7ffc96860e40 } exception caught: printer end { } -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -stdout: -./c++.at:1555: $PREPARSER ./test Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffc1747eb37->Object::Object { } -0x7ffc1747ebd0->Object::Object { 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'a' (0x7ffc1747ebd0 'a') -0x7ffc1747eb10->Object::Object { 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x7ffc1747eb10, 0x7ffc1747ebd0 } -Shifting token 'a' (0x7ffc1747eb10 'a') -0x556f334c52e0->Object::Object { 0x7ffc1747eb10 } -0x7ffc1747eb10->Object::~Object { 0x556f334c52e0, 0x7ffc1747eb10 } +0x7ffc96860d5f->Object::Object { } +0x7ffc96860e40->Object::Object { 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'a' (0x7ffc96860e40 'a') +0x7ffc96860d80->Object::Object { 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x7ffc96860d80, 0x7ffc96860e40 } +Shifting token 'a' (0x7ffc96860d80 'a') +0x55b6f21272e0->Object::Object { 0x7ffc96860d80 } +0x7ffc96860d80->Object::~Object { 0x55b6f21272e0, 0x7ffc96860d80 } Entering state 2 Stack now 0 2 -0x7ffc1747ebf0->Object::Object { 0x556f334c52e0 } +0x7ffc96860e60->Object::Object { 0x55b6f21272e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556f334c52e0 'a') --> $$ = nterm item (0x7ffc1747ebf0 'a') -0x556f334c52e0->Object::~Object { 0x556f334c52e0, 0x7ffc1747ebf0 } -0x556f334c52e0->Object::Object { 0x7ffc1747ebf0 } -0x7ffc1747ebf0->Object::~Object { 0x556f334c52e0, 0x7ffc1747ebf0 } + $1 = token 'a' 
(0x55b6f21272e0 'a') +-> $$ = nterm item (0x7ffc96860e60 'a') +0x55b6f21272e0->Object::~Object { 0x55b6f21272e0, 0x7ffc96860e60 } +0x55b6f21272e0->Object::Object { 0x7ffc96860e60 } +0x7ffc96860e60->Object::~Object { 0x55b6f21272e0, 0x7ffc96860e60 } Entering state 11 Stack now 0 11 Reading a token -0x7ffc1747eb37->Object::Object { 0x556f334c52e0 } -0x7ffc1747ebd0->Object::Object { 0x556f334c52e0, 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 0x556f334c52e0, 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'a' (0x7ffc1747ebd0 'a') -0x7ffc1747eb10->Object::Object { 0x556f334c52e0, 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x556f334c52e0, 0x7ffc1747eb10, 0x7ffc1747ebd0 } -Shifting token 'a' (0x7ffc1747eb10 'a') -0x556f334c5300->Object::Object { 0x556f334c52e0, 0x7ffc1747eb10 } -0x7ffc1747eb10->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb10 } +0x7ffc96860d5f->Object::Object { 0x55b6f21272e0 } +0x7ffc96860e40->Object::Object { 0x55b6f21272e0, 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x55b6f21272e0, 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'a' (0x7ffc96860e40 'a') +0x7ffc96860d80->Object::Object { 0x55b6f21272e0, 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x55b6f21272e0, 0x7ffc96860d80, 0x7ffc96860e40 } +Shifting token 'a' (0x7ffc96860d80 'a') +0x55b6f2127300->Object::Object { 0x55b6f21272e0, 0x7ffc96860d80 } +0x7ffc96860d80->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d80 } Entering state 2 Stack now 0 11 2 -0x7ffc1747ebf0->Object::Object { 0x556f334c52e0, 0x556f334c5300 } +0x7ffc96860e60->Object::Object { 0x55b6f21272e0, 0x55b6f2127300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556f334c5300 'a') --> $$ = nterm item (0x7ffc1747ebf0 'a') -0x556f334c5300->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebf0 } -0x556f334c5300->Object::Object { 0x556f334c52e0, 0x7ffc1747ebf0 } -0x7ffc1747ebf0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebf0 } + $1 = token 'a' (0x55b6f2127300 'a') +-> $$ = nterm item (0x7ffc96860e60 'a') +0x55b6f2127300->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e60 } +0x55b6f2127300->Object::Object { 0x55b6f21272e0, 0x7ffc96860e60 } +0x7ffc96860e60->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e60 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffc1747eb37->Object::Object { 0x556f334c52e0, 0x556f334c5300 } -0x7ffc1747ebd0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'a' (0x7ffc1747ebd0 'a') -0x7ffc1747eb10->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb10, 0x7ffc1747ebd0 } -Shifting token 'a' (0x7ffc1747eb10 'a') -0x556f334c5320->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747eb10 } -0x7ffc1747eb10->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb10 } +0x7ffc96860d5f->Object::Object { 0x55b6f21272e0, 0x55b6f2127300 } +0x7ffc96860e40->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'a' (0x7ffc96860e40 'a') +0x7ffc96860d80->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d80, 0x7ffc96860e40 } 
+Shifting token 'a' (0x7ffc96860d80 'a') +0x55b6f2127320->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860d80 } +0x7ffc96860d80->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d80 } Entering state 2 Stack now 0 11 11 2 -0x7ffc1747ebf0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320 } +0x7ffc96860e60->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556f334c5320 'a') --> $$ = nterm item (0x7ffc1747ebf0 'a') -0x556f334c5320->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebf0 } -0x556f334c5320->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebf0 } -0x7ffc1747ebf0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebf0 } + $1 = token 'a' (0x55b6f2127320 'a') +-> $$ = nterm item (0x7ffc96860e60 'a') +0x55b6f2127320->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e60 } +0x55b6f2127320->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e60 } +0x7ffc96860e60->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e60 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffc1747eb37->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320 } -0x7ffc1747ebd0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'a' (0x7ffc1747ebd0 'a') -0x7ffc1747eb10->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb10, 0x7ffc1747ebd0 } -Shifting token 'a' (0x7ffc1747eb10 'a') -0x556f334c5340->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747eb10 } -0x7ffc1747eb10->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747eb10 } +0x7ffc96860d5f->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320 } +0x7ffc96860e40->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'a' (0x7ffc96860e40 'a') +0x7ffc96860d80->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d80, 0x7ffc96860e40 } +Shifting token 'a' (0x7ffc96860d80 'a') +0x55b6f2127340->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860d80 } +0x7ffc96860d80->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860d80 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffc1747ebf0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340 } +0x7ffc96860e60->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556f334c5340 'a') --> $$ = nterm item (0x7ffc1747ebf0 'a') -0x556f334c5340->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747ebf0 } -0x556f334c5340->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebf0 } -0x7ffc1747ebf0->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 
0x7ffc1747ebf0 } + $1 = token 'a' (0x55b6f2127340 'a') +-> $$ = nterm item (0x7ffc96860e60 'a') +0x55b6f2127340->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860e60 } +0x55b6f2127340->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e60 } +0x7ffc96860e60->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860e60 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffc1747eb37->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340 } -0x7ffc1747ebd0->Object::Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747eb37 } -0x7ffc1747eb37->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747eb37, 0x7ffc1747ebd0 } -Next token is token 'p' (0x7ffc1747ebd0 'p'Exception caught: cleaning lookahead and stack -0x556f334c5340->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x556f334c5340, 0x7ffc1747ebd0 } -0x556f334c5320->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x556f334c5320, 0x7ffc1747ebd0 } -0x556f334c5300->Object::~Object { 0x556f334c52e0, 0x556f334c5300, 0x7ffc1747ebd0 } -0x556f334c52e0->Object::~Object { 0x556f334c52e0, 0x7ffc1747ebd0 } -0x7ffc1747ebd0->Object::~Object { 0x7ffc1747ebd0 } +0x7ffc96860d5f->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340 } +0x7ffc96860e40->Object::Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860d5f } +0x7ffc96860d5f->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860d5f, 0x7ffc96860e40 } +Next token is token 'p' (0x7ffc96860e40 'p'Exception caught: cleaning lookahead and stack +0x55b6f2127340->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x55b6f2127340, 0x7ffc96860e40 } +0x55b6f2127320->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x55b6f2127320, 0x7ffc96860e40 } +0x55b6f2127300->Object::~Object { 0x55b6f21272e0, 0x55b6f2127300, 0x7ffc96860e40 } +0x55b6f21272e0->Object::~Object { 0x55b6f21272e0, 0x7ffc96860e40 } +0x7ffc96860e40->Object::~Object { 0x7ffc96860e40 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: stdout: exception caught: printer -./glr-regression.at:2150: $PREPARSER ./input --debug -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaae stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token 'a' () -Shifting token 'a' () -Entering state 1 -Reading a token -Next token is token 'b' () -Shifting token 'b' () -Entering state 3 -Reducing stack 0 by rule 3 (line 30): - $1 = token 'b' () --> $$ = nterm b () -Entering state 4 -Reading a token -Next token is token 'c' () -Shifting token 'c' () -Entering state 6 -Reducing stack 0 by rule 4 (line 31): --> $$ = nterm d () -Entering state 7 -Reading a token -Now at end of input. -Stack 0 Entering state 7 -Now at end of input. -Splitting off stack 1 from 0. -Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. -Stack 1 Entering state 2 -Now at end of input. -Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. -Merging stack 0 into stack 1. -Stack 1 Entering state 2 -Now at end of input. -Removing dead stacks. -Rename stack 1 -> 0. 
-On stack 0, shifting token "end of file" () -Stack 0 now in state 5 -Ambiguity detected. -Option 1, - start -> - 'a' - b - 'c' - d - -Option 2, - start -> - 'a' - b - 'c' - d - -syntax is ambiguous -Cleanup: popping token "end of file" () -Cleanup: popping unresolved nterm start () -Cleanup: popping nterm d () -Cleanup: popping token 'c' () -Cleanup: popping nterm b () -Cleanup: popping token 'a' () -stderr: -./glr-regression.at:2150: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: syntax error -691. c++.at:1517: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaE -772. glr-regression.at:2150: ok -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:659: $PREPARSER ./input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./glr-regression.at:1680: $PREPARSER ./glr-regr14 -675. c++.at:584: ok -stderr: -./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -758. glr-regression.at:1680: ok -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:1787: $PREPARSER ./glr-regr15 -stderr: -Ambiguity detected. 
-Option 1, - ambiguity -> - ambiguity1 -> - -Option 2, - ambiguity -> - ambiguity2 -> - -syntax is ambiguous -./glr-regression.at:1787: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -761. glr-regression.at:1787: ok stderr: stdout: -./glr-regression.at:1862: $PREPARSER ./glr-regr16 -stderr: -syntax error -./glr-regression.at:1862: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -764. glr-regression.at:1862: ok -stderr: +./c++.at:570: $here/modern +./c++.at:1362: $PREPARSER ./input aaaaT stdout: -./c++.at:856: $PREPARSER ./input -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: -./c++.at:574: $here/modern -stdout: -Modern C++: 201402 -./c++.at:574: $PREPARSER ./list +Modern C++: 201703 +./c++.at:570: $PREPARSER ./list +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR stderr: -Destroy: "" -Destroy: "" +Destroy: "0" +Destroy: "0" Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) Destroy: "" Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -664. c++.at:107: ok -stderr: -stdout: -./glr-regression.at:1966: $PREPARSER ./glr-regr17 -stderr: -Ambiguity detected. -Option 1, - start -> - ambig1 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty1 -> - -Option 2, - start -> - ambig2 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty2 -> - -1.1-2.2: syntax is ambiguous -./glr-regression.at:1966: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -767. 
glr-regression.at:1966: ok stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./c++.at:1363: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1363: $PREPARSER ./input aaaal stderr: exception caught: yylex @@ -268258,754 +268558,598 @@ ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaap stderr: +stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input aaaas +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff53a53aef->Object::Object { } -0x7fff53a53bd0->Object::Object { 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'a' (0x7fff53a53bd0 'a') -0x7fff53a53b10->Object::Object { 0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x7fff53a53b10, 0x7fff53a53bd0 } -Shifting token 'a' (0x7fff53a53b10 'a') -0x55c8e75d82e0->Object::Object { 0x7fff53a53b10 } -0x7fff53a53b10->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53b10 } +0x7ffcc897beef->Object::Object { } +0x7ffcc897bfd0->Object::Object { 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'a' (0x7ffcc897bfd0 'a') +0x7ffcc897bf10->Object::Object { 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x7ffcc897bf10, 0x7ffcc897bfd0 } +Shifting token 'a' (0x7ffcc897bf10 'a') +0x55b81a44e2e0->Object::Object { 0x7ffcc897bf10 } +0x7ffcc897bf10->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bf10 } Entering state 1 Stack now 0 1 -0x7fff53a53bf0->Object::Object { 0x55c8e75d82e0 } +0x7ffcc897bff0->Object::Object { 0x55b81a44e2e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c8e75d82e0 'a') --> $$ = nterm item (0x7fff53a53bf0 'a') -0x55c8e75d82e0->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53bf0 } -0x55c8e75d82e0->Object::Object { 0x7fff53a53bf0 } -0x7fff53a53bf0->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53bf0 } + $1 = token 'a' (0x55b81a44e2e0 'a') +-> $$ = nterm item (0x7ffcc897bff0 'a') +0x55b81a44e2e0->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bff0 } +0x55b81a44e2e0->Object::Object { 0x7ffcc897bff0 } +0x7ffcc897bff0->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bff0 } Entering state 10 Stack now 0 10 Reading a token -0x7fff53a53aef->Object::Object { 0x55c8e75d82e0 } -0x7fff53a53bd0->Object::Object { 0x55c8e75d82e0, 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'a' (0x7fff53a53bd0 'a') -0x7fff53a53b10->Object::Object { 0x55c8e75d82e0, 
0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53b10, 0x7fff53a53bd0 } -Shifting token 'a' (0x7fff53a53b10 'a') -0x55c8e75d8300->Object::Object { 0x55c8e75d82e0, 0x7fff53a53b10 } -0x7fff53a53b10->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53b10 } +0x7ffcc897beef->Object::Object { 0x55b81a44e2e0 } +0x7ffcc897bfd0->Object::Object { 0x55b81a44e2e0, 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'a' (0x7ffcc897bfd0 'a') +0x7ffcc897bf10->Object::Object { 0x55b81a44e2e0, 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bf10, 0x7ffcc897bfd0 } +Shifting token 'a' (0x7ffcc897bf10 'a') +0x55b81a44e300->Object::Object { 0x55b81a44e2e0, 0x7ffcc897bf10 } +0x7ffcc897bf10->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bf10 } Entering state 1 Stack now 0 10 1 -0x7fff53a53bf0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300 } +0x7ffcc897bff0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c8e75d8300 'a') --> $$ = nterm item (0x7fff53a53bf0 'a') -0x55c8e75d8300->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bf0 } -0x55c8e75d8300->Object::Object { 0x55c8e75d82e0, 0x7fff53a53bf0 } -0x7fff53a53bf0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bf0 } + $1 = token 'a' (0x55b81a44e300 'a') +-> $$ = nterm item (0x7ffcc897bff0 'a') +0x55b81a44e300->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bff0 } +0x55b81a44e300->Object::Object { 0x55b81a44e2e0, 0x7ffcc897bff0 } +0x7ffcc897bff0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bff0 } Entering state 10 Stack now 0 10 10 Reading a token -0x7fff53a53aef->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300 } -0x7fff53a53bd0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'a' (0x7fff53a53bd0 'a') -0x7fff53a53b10->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53b10, 0x7fff53a53bd0 } -Shifting token 'a' (0x7fff53a53b10 'a') -0x55c8e75d8320->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53b10 } -0x7fff53a53b10->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53b10 } +0x7ffcc897beef->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300 } +0x7ffcc897bfd0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'a' (0x7ffcc897bfd0 'a') +0x7ffcc897bf10->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bf10, 0x7ffcc897bfd0 } +Shifting token 'a' (0x7ffcc897bf10 'a') +0x55b81a44e320->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bf10 } +0x7ffcc897bf10->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bf10 } Entering state 1 Stack now 0 10 10 1 -0x7fff53a53bf0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320 } +0x7ffcc897bff0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c8e75d8320 'a') --> $$ = nterm item (0x7fff53a53bf0 'a') -0x55c8e75d8320->Object::~Object { 0x55c8e75d82e0, 
0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bf0 } -0x55c8e75d8320->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bf0 } -0x7fff53a53bf0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bf0 } + $1 = token 'a' (0x55b81a44e320 'a') +-> $$ = nterm item (0x7ffcc897bff0 'a') +0x55b81a44e320->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bff0 } +0x55b81a44e320->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bff0 } +0x7ffcc897bff0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bff0 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff53a53aef->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320 } -0x7fff53a53bd0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'a' (0x7fff53a53bd0 'a') -0x7fff53a53b10->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53b10, 0x7fff53a53bd0 } -Shifting token 'a' (0x7fff53a53b10 'a') -0x55c8e75d8340->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53b10 } -0x7fff53a53b10->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53b10 } +0x7ffcc897beef->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320 } +0x7ffcc897bfd0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'a' (0x7ffcc897bfd0 'a') +0x7ffcc897bf10->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bf10, 0x7ffcc897bfd0 } +Shifting token 'a' (0x7ffcc897bf10 'a') +0x55b81a44e340->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bf10 } +0x7ffcc897bf10->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897bf10 } Entering state 1 Stack now 0 10 10 10 1 -0x7fff53a53bf0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340 } +0x7ffcc897bff0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c8e75d8340 'a') --> $$ = nterm item (0x7fff53a53bf0 'a') -0x55c8e75d8340->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53bf0 } -0x55c8e75d8340->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bf0 } -0x7fff53a53bf0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53bf0 } + $1 = token 'a' (0x55b81a44e340 'a') +-> $$ = nterm item (0x7ffcc897bff0 'a') +0x55b81a44e340->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897bff0 } +0x55b81a44e340->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bff0 } +0x7ffcc897bff0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897bff0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff53a53aef->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340 } 
-0x7fff53a53bd0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'p' (0x7fff53a53bd0 'p'Exception caught: cleaning lookahead and stack -0x55c8e75d8340->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53bd0 } -0x55c8e75d8320->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bd0 } -0x55c8e75d8300->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bd0 } -0x55c8e75d82e0->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x7fff53a53bd0 } +0x7ffcc897beef->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340 } +0x7ffcc897bfd0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'p' (0x7ffcc897bfd0 'p'Exception caught: cleaning lookahead and stack +0x55b81a44e340->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897bfd0 } +0x55b81a44e320->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bfd0 } +0x55b81a44e300->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bfd0 } +0x55b81a44e2e0->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x7ffcc897bfd0 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal +stderr: +stderr: stderr: +stdout: +exception caught: yylex Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff53a53aef->Object::Object { } -0x7fff53a53bd0->Object::Object { 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'a' (0x7fff53a53bd0 'a') -0x7fff53a53b10->Object::Object { 0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x7fff53a53b10, 0x7fff53a53bd0 } -Shifting token 'a' (0x7fff53a53b10 'a') -0x55c8e75d82e0->Object::Object { 0x7fff53a53b10 } -0x7fff53a53b10->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53b10 } +0x7ffcc897beef->Object::Object { } +0x7ffcc897bfd0->Object::Object { 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'a' (0x7ffcc897bfd0 'a') +0x7ffcc897bf10->Object::Object { 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x7ffcc897bf10, 0x7ffcc897bfd0 } +Shifting token 'a' (0x7ffcc897bf10 'a') +0x55b81a44e2e0->Object::Object { 0x7ffcc897bf10 } +0x7ffcc897bf10->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bf10 } Entering state 1 Stack now 0 1 -0x7fff53a53bf0->Object::Object { 0x55c8e75d82e0 } +0x7ffcc897bff0->Object::Object { 0x55b81a44e2e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c8e75d82e0 'a') --> $$ = nterm item (0x7fff53a53bf0 'a') -0x55c8e75d82e0->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53bf0 } -0x55c8e75d82e0->Object::Object { 0x7fff53a53bf0 } -0x7fff53a53bf0->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53bf0 } + $1 = token 'a' (0x55b81a44e2e0 'a') +-> $$ = nterm item (0x7ffcc897bff0 'a') +0x55b81a44e2e0->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bff0 } +0x55b81a44e2e0->Object::Object { 0x7ffcc897bff0 } +0x7ffcc897bff0->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bff0 } 
Entering state 10 Stack now 0 10 Reading a token -0x7fff53a53aef->Object::Object { 0x55c8e75d82e0 } -0x7fff53a53bd0->Object::Object { 0x55c8e75d82e0, 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'a' (0x7fff53a53bd0 'a') -0x7fff53a53b10->Object::Object { 0x55c8e75d82e0, 0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53b10, 0x7fff53a53bd0 } -Shifting token 'a' (0x7fff53a53b10 'a') -0x55c8e75d8300->Object::Object { 0x55c8e75d82e0, 0x7fff53a53b10 } -0x7fff53a53b10->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53b10 } +0x7ffcc897beef->Object::Object { 0x55b81a44e2e0 } +0x7ffcc897bfd0->Object::Object { 0x55b81a44e2e0, 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'a' (0x7ffcc897bfd0 'a') +0x7ffcc897bf10->Object::Object { 0x55b81a44e2e0, 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bf10, 0x7ffcc897bfd0 } +Shifting token 'a' (0x7ffcc897bf10 'a') +0x55b81a44e300->Object::Object { 0x55b81a44e2e0, 0x7ffcc897bf10 } +0x7ffcc897bf10->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bf10 } Entering state 1 Stack now 0 10 1 -0x7fff53a53bf0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300 } +0x7ffcc897bff0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c8e75d8300 'a') --> $$ = nterm item (0x7fff53a53bf0 'a') -0x55c8e75d8300->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bf0 } -0x55c8e75d8300->Object::Object { 0x55c8e75d82e0, 0x7fff53a53bf0 } -0x7fff53a53bf0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bf0 } + $1 = token 'a' (0x55b81a44e300 'a') +-> $$ = nterm item (0x7ffcc897bff0 'a') +0x55b81a44e300->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bff0 } +0x55b81a44e300->Object::Object { 0x55b81a44e2e0, 0x7ffcc897bff0 } +0x7ffcc897bff0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bff0 } Entering state 10 Stack now 0 10 10 Reading a token -0x7fff53a53aef->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300 } -0x7fff53a53bd0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'a' (0x7fff53a53bd0 'a') -0x7fff53a53b10->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53b10, 0x7fff53a53bd0 } -Shifting token 'a' (0x7fff53a53b10 'a') -0x55c8e75d8320->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53b10 } -0x7fff53a53b10->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53b10 } +0x7ffcc897beef->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300 } +0x7ffcc897bfd0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'a' (0x7ffcc897bfd0 'a') +0x7ffcc897bf10->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bf10, 0x7ffcc897bfd0 } +Shifting token 'a' (0x7ffcc897bf10 'a') +0x55b81a44e320->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bf10 } +0x7ffcc897bf10->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bf10 } Entering state 1 Stack now 0 10 
10 1 -0x7fff53a53bf0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320 } +0x7ffcc897bff0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c8e75d8320 'a') --> $$ = nterm item (0x7fff53a53bf0 'a') -0x55c8e75d8320->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bf0 } -0x55c8e75d8320->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bf0 } -0x7fff53a53bf0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bf0 } + $1 = token 'a' (0x55b81a44e320 'a') +-> $$ = nterm item (0x7ffcc897bff0 'a') +0x55b81a44e320->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bff0 } +0x55b81a44e320->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bff0 } +0x7ffcc897bff0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bff0 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff53a53aef->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320 } -0x7fff53a53bd0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'a' (0x7fff53a53bd0 'a') -0x7fff53a53b10->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53b10, 0x7fff53a53bd0 } -Shifting token 'a' (0x7fff53a53b10 'a') -0x55c8e75d8340->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53b10 } -0x7fff53a53b10->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53b10 } +0x7ffcc897beef->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320 } +0x7ffcc897bfd0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'a' (0x7ffcc897bfd0 'a') +0x7ffcc897bf10->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bf10, 0x7ffcc897bfd0 } +Shifting token 'a' (0x7ffcc897bf10 'a') +0x55b81a44e340->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bf10 } +0x7ffcc897bf10->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897bf10 } Entering state 1 Stack now 0 10 10 10 1 -0x7fff53a53bf0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340 } +0x7ffcc897bff0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c8e75d8340 'a') --> $$ = nterm item (0x7fff53a53bf0 'a') -0x55c8e75d8340->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53bf0 } -0x55c8e75d8340->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bf0 } -0x7fff53a53bf0->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53bf0 } + $1 = token 'a' (0x55b81a44e340 'a') +-> $$ = nterm item (0x7ffcc897bff0 'a') +0x55b81a44e340->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897bff0 } +0x55b81a44e340->Object::Object { 0x55b81a44e2e0, 
0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bff0 } +0x7ffcc897bff0->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897bff0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff53a53aef->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340 } -0x7fff53a53bd0->Object::Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53aef } -0x7fff53a53aef->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53aef, 0x7fff53a53bd0 } -Next token is token 'p' (0x7fff53a53bd0 'p'Exception caught: cleaning lookahead and stack -0x55c8e75d8340->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x55c8e75d8340, 0x7fff53a53bd0 } -0x55c8e75d8320->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x55c8e75d8320, 0x7fff53a53bd0 } -0x55c8e75d8300->Object::~Object { 0x55c8e75d82e0, 0x55c8e75d8300, 0x7fff53a53bd0 } -0x55c8e75d82e0->Object::~Object { 0x55c8e75d82e0, 0x7fff53a53bd0 } -0x7fff53a53bd0->Object::~Object { 0x7fff53a53bd0 } +0x7ffcc897beef->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340 } +0x7ffcc897bfd0->Object::Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897beef } +0x7ffcc897beef->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897beef, 0x7ffcc897bfd0 } +Next token is token 'p' (0x7ffcc897bfd0 'p'Exception caught: cleaning lookahead and stack +0x55b81a44e340->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x55b81a44e340, 0x7ffcc897bfd0 } +0x55b81a44e320->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x55b81a44e320, 0x7ffcc897bfd0 } +0x55b81a44e300->Object::~Object { 0x55b81a44e2e0, 0x55b81a44e300, 0x7ffcc897bfd0 } +0x55b81a44e2e0->Object::~Object { 0x55b81a44e2e0, 0x7ffcc897bfd0 } +0x7ffcc897bfd0->Object::~Object { 0x7ffcc897bfd0 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in stdout: exception caught: printer ./c++.at:1363: $PREPARSER ./input aaaae stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1361: $PREPARSER ./input i +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stdout: exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +exception caught: initial-action +./c++.at:1066: $PREPARSER ./input < in +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ./c++.at:1363: $PREPARSER ./input aaaaE stderr: +./c++.at:1361: $PREPARSER ./input aaaap +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stdout: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: 
$PREPARSER ./input < in +stderr: +./c++.at:1361: $PREPARSER ./input --debug aaaap ./c++.at:1363: $PREPARSER ./input aaaaT +stdout: +stderr: +./c++.at:850: $PREPARSER ./input +stderr: stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56089f3c5f40->Object::Object { } +Next token is token 'a' (0x56089f3c5f40 'a') +Shifting token 'a' (0x56089f3c5f40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56089f3c5f40 'a') +-> $$ = nterm item (0x56089f3c5f40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x56089f3c5f90->Object::Object { 0x56089f3c5f40 } +Next token is token 'a' (0x56089f3c5f90 'a') +Shifting token 'a' (0x56089f3c5f90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56089f3c5f90 'a') +-> $$ = nterm item (0x56089f3c5f90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x56089f3c5fe0->Object::Object { 0x56089f3c5f40, 0x56089f3c5f90 } +Next token is token 'a' (0x56089f3c5fe0 'a') +Shifting token 'a' (0x56089f3c5fe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56089f3c5fe0 'a') +-> $$ = nterm item (0x56089f3c5fe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x56089f3c6030->Object::Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0 } +Next token is token 'a' (0x56089f3c6030 'a') +Shifting token 'a' (0x56089f3c6030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56089f3c6030 'a') +-> $$ = nterm item (0x56089f3c6030 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x56089f3c6080->Object::Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0, 0x56089f3c6030 } +Next token is token 'p' (0x56089f3c6080 'p'Exception caught: cleaning lookahead and stack +0x56089f3c6080->Object::~Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0, 0x56089f3c6030, 0x56089f3c6080 } +0x56089f3c6030->Object::~Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0, 0x56089f3c6030 } +0x56089f3c5fe0->Object::~Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0 } +0x56089f3c5f90->Object::~Object { 0x56089f3c5f40, 0x56089f3c5f90 } +0x56089f3c5f40->Object::~Object { 0x56089f3c5f40 } +exception caught: printer +end { } +error: invalid character +stderr: +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaR +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56089f3c5f40->Object::Object { } +Next token is token 'a' (0x56089f3c5f40 'a') +Shifting token 'a' (0x56089f3c5f40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56089f3c5f40 'a') +-> $$ = nterm item (0x56089f3c5f40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x56089f3c5f90->Object::Object { 0x56089f3c5f40 } +Next token is token 'a' (0x56089f3c5f90 'a') +Shifting token 'a' (0x56089f3c5f90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56089f3c5f90 'a') +-> $$ = nterm item (0x56089f3c5f90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x56089f3c5fe0->Object::Object { 0x56089f3c5f40, 0x56089f3c5f90 } 
+Next token is token 'a' (0x56089f3c5fe0 'a') +Shifting token 'a' (0x56089f3c5fe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56089f3c5fe0 'a') +-> $$ = nterm item (0x56089f3c5fe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x56089f3c6030->Object::Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0 } +Next token is token 'a' (0x56089f3c6030 'a') +Shifting token 'a' (0x56089f3c6030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56089f3c6030 'a') +-> $$ = nterm item (0x56089f3c6030 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x56089f3c6080->Object::Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0, 0x56089f3c6030 } +Next token is token 'p' (0x56089f3c6080 'p'Exception caught: cleaning lookahead and stack +0x56089f3c6080->Object::~Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0, 0x56089f3c6030, 0x56089f3c6080 } +0x56089f3c6030->Object::~Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0, 0x56089f3c6030 } +0x56089f3c5fe0->Object::~Object { 0x56089f3c5f40, 0x56089f3c5f90, 0x56089f3c5fe0 } +0x56089f3c5f90->Object::~Object { 0x56089f3c5f40, 0x56089f3c5f90 } +0x56089f3c5f40->Object::~Object { 0x56089f3c5f40 } +exception caught: printer +end { } stderr: +./c++.at:1361: grep '^exception caught: printer$' stderr +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stdout: +exception caught: printer ======== Testing with C++ standard flags: '' +./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: -./c++.at:568: $here/modern -stdout: -Modern C++: 201703 -./c++.at:568: $PREPARSER ./list +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT stderr: -stdout: -./c++.at:856: $PREPARSER ./input +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc 
$LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: ./c++.at:1360: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./glr-regression.at:2231: $PREPARSER ./input Nwin ./c++.at:1360: $PREPARSER ./input aaaal stderr: -stderr: -./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 exception caught: yylex +stderr: +stdout: +./c++.at:856: $PREPARSER ./input ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2231: $PREPARSER ./input Owin stderr: -syntax error, unexpected 'n', expecting 'o' -./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input i stderr: -stdout: -./c++.at:570: $here/modern -./glr-regression.at:2231: $PREPARSER ./input Owio -stderr: +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS exception caught: initial-action -stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Modern C++: 201703 -stdout: -./glr-regression.at:2231: $PREPARSER ./input Nwio -./c++.at:570: $PREPARSER ./list -./c++.at:566: $here/modern -stderr: ./c++.at:1360: $PREPARSER ./input aaaap -syntax error, unexpected 'o', expecting 'n' -stdout: -stderr: -./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Modern C++: 202002 -./c++.at:566: $PREPARSER ./list -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: -776. 
glr-regression.at:2231: ok -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x55716a6bef40->Object::Object { } -Next token is token 'a' (0x55716a6bef40 'a') -Shifting token 'a' (0x55716a6bef40 'a') +0x5607d4c32f40->Object::Object { } +Next token is token 'a' (0x5607d4c32f40 'a') +Shifting token 'a' (0x5607d4c32f40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55716a6bef40 'a') --> $$ = nterm item (0x55716a6bef40 'a') + $1 = token 'a' (0x5607d4c32f40 'a') +-> $$ = nterm item (0x5607d4c32f40 'a') Entering state 11 Stack now 0 11 Reading a token -0x55716a6bef90->Object::Object { 0x55716a6bef40 } -Next token is token 'a' (0x55716a6bef90 'a') -Shifting token 'a' (0x55716a6bef90 'a') +0x5607d4c32f90->Object::Object { 0x5607d4c32f40 } +Next token is token 'a' (0x5607d4c32f90 'a') +Shifting token 'a' (0x5607d4c32f90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55716a6bef90 'a') --> $$ = nterm item (0x55716a6bef90 'a') + $1 = token 'a' (0x5607d4c32f90 'a') +-> $$ = nterm item (0x5607d4c32f90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x55716a6befe0->Object::Object { 0x55716a6bef40, 0x55716a6bef90 } -Next token is token 'a' (0x55716a6befe0 'a') -Shifting token 'a' (0x55716a6befe0 'a') +0x5607d4c32fe0->Object::Object { 0x5607d4c32f40, 0x5607d4c32f90 } +Next token is token 'a' (0x5607d4c32fe0 'a') +Shifting token 'a' (0x5607d4c32fe0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55716a6befe0 'a') --> $$ = nterm item (0x55716a6befe0 'a') + $1 = token 'a' (0x5607d4c32fe0 'a') +-> $$ = nterm item (0x5607d4c32fe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x55716a6bf030->Object::Object { 0x55716a6bef40, 0x55716a6bef90, 0x55716a6befe0 } -Next token is token 'a' (0x55716a6bf030 'a') -Shifting token 'a' (0x55716a6bf030 'a') +0x5607d4c33030->Object::Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0 } +Next token is token 'a' (0x5607d4c33030 'a') +Shifting token 'a' (0x5607d4c33030 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55716a6bf030 'a') --> $$ = nterm item (0x55716a6bf030 'a') + $1 = token 'a' (0x5607d4c33030 'a') +-> $$ = nterm item (0x5607d4c33030 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x55716a6bf080->Object::Object { 0x55716a6bef40, 0x55716a6bef90, 0x55716a6befe0, 0x55716a6bf030 } -Next token is token 'p' (0x55716a6bf080 'p'Exception caught: cleaning lookahead and stack -0x55716a6bf080->Object::~Object { 0x55716a6bef40, 0x55716a6bef90, 0x55716a6befe0, 0x55716a6bf030, 0x55716a6bf080 } -0x55716a6bf030->Object::~Object { 0x55716a6bef40, 0x55716a6bef90, 0x55716a6befe0, 0x55716a6bf030 } -0x55716a6befe0->Object::~Object { 0x55716a6bef40, 0x55716a6bef90, 0x55716a6befe0 } -0x55716a6bef90->Object::~Object { 0x55716a6bef40, 0x55716a6bef90 } -0x55716a6bef40->Object::~Object { 0x55716a6bef40 } +0x5607d4c33080->Object::Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0, 0x5607d4c33030 } +Next token is token 'p' (0x5607d4c33080 'p'Exception caught: cleaning lookahead and stack 
+0x5607d4c33080->Object::~Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0, 0x5607d4c33030, 0x5607d4c33080 } +0x5607d4c33030->Object::~Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0, 0x5607d4c33030 } +0x5607d4c32fe0->Object::~Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0 } +0x5607d4c32f90->Object::~Object { 0x5607d4c32f40, 0x5607d4c32f90 } +0x5607d4c32f40->Object::~Object { 0x5607d4c32f40 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x55716a6bef40->Object::Object { } -Next token is token 'a' (0x55716a6bef40 'a') -Shifting token 'a' (0x55716a6bef40 'a') +0x5607d4c32f40->Object::Object { } +Next token is token 'a' (0x5607d4c32f40 'a') +Shifting token 'a' (0x5607d4c32f40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55716a6bef40 'a') --> $$ = nterm item (0x55716a6bef40 'a') + $1 = token 'a' (0x5607d4c32f40 'a') +-> $$ = nterm item (0x5607d4c32f40 'a') Entering state 11 Stack now 0 11 Reading a token -0x55716a6bef90->Object::Object { 0x55716a6bef40 } -Next token is token 'a' (0x55716a6bef90 'a') -Shifting token 'a' (0x55716a6bef90 'a') +0x5607d4c32f90->Object::Object { 0x5607d4c32f40 } +Next token is token 'a' (0x5607d4c32f90 'a') +Shifting token 'a' (0x5607d4c32f90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55716a6bef90 'a') --> $$ = nterm item (0x55716a6bef90 'a') + $1 = token 'a' (0x5607d4c32f90 'a') +-> $$ = nterm item (0x5607d4c32f90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x55716a6befe0->Object::Object { 0x55716a6bef40, 0x55716a6bef90 } -Next token is token 'a' (0x55716a6befe0 'a') -Shifting token 'a' (0x55716a6befe0 'a') +0x5607d4c32fe0->Object::Object { 0x5607d4c32f40, 0x5607d4c32f90 } +Next token is token 'a' (0x5607d4c32fe0 'a') +Shifting token 'a' (0x5607d4c32fe0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55716a6befe0 'a') --> $$ = nterm item (0x55716a6befe0 'a') + $1 = token 'a' (0x5607d4c32fe0 'a') +-> $$ = nterm item (0x5607d4c32fe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x55716a6bf030->Object::Object { 0x55716a6bef40, 0x55716a6bef90, 0x55716a6befe0 } -Next token is token 'a' (0x55716a6bf030 'a') -Shifting token 'a' (0x55716a6bf030 'a') +0x5607d4c33030->Object::Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0 } +Next token is token 'a' (0x5607d4c33030 'a') +Shifting token 'a' (0x5607d4c33030 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55716a6bf030 'a') --> $$ = nterm item (0x55716a6bf030 'a') + $1 = token 'a' (0x5607d4c33030 'a') +-> $$ = nterm item (0x5607d4c33030 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x55716a6bf080->Object::Object { 0x55716a6bef40, 0x55716a6bef90, 0x55716a6befe0, 0x55716a6bf030 } -Next token is token 'p' (0x55716a6bf080 'p'Exception caught: cleaning lookahead and stack -0x55716a6bf080->Object::~Object { 0x55716a6bef40, 0x55716a6bef90, 0x55716a6befe0, 0x55716a6bf030, 0x55716a6bf080 } -0x55716a6bf030->Object::~Object { 0x55716a6bef40, 0x55716a6bef90, 
0x55716a6befe0, 0x55716a6bf030 } -0x55716a6befe0->Object::~Object { 0x55716a6bef40, 0x55716a6bef90, 0x55716a6befe0 } -0x55716a6bef90->Object::~Object { 0x55716a6bef40, 0x55716a6bef90 } -0x55716a6bef40->Object::~Object { 0x55716a6bef40 } +0x5607d4c33080->Object::Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0, 0x5607d4c33030 } +Next token is token 'p' (0x5607d4c33080 'p'Exception caught: cleaning lookahead and stack +0x5607d4c33080->Object::~Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0, 0x5607d4c33030, 0x5607d4c33080 } +0x5607d4c33030->Object::~Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0, 0x5607d4c33030 } +0x5607d4c32fe0->Object::~Object { 0x5607d4c32f40, 0x5607d4c32f90, 0x5607d4c32fe0 } +0x5607d4c32f90->Object::~Object { 0x5607d4c32f40, 0x5607d4c32f90 } +0x5607d4c32f40->Object::~Object { 0x5607d4c32f40 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: -stderr: exception caught: printer ./c++.at:1360: $PREPARSER ./input aaaae -stdout: stderr: exception caught: syntax error ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaaT stderr: -stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1362: $PREPARSER ./input aaaas ./c++.at:1360: $PREPARSER ./input aaaaR stderr: -stderr: -exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -stderr: -stdout: ./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:1362: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1362: $PREPARSER ./input aaaap -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffc60a8dfbf->Object::Object { } -0x7ffc60a8e0a0->Object::Object { 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'a' (0x7ffc60a8e0a0 'a') -0x7ffc60a8dfe0->Object::Object { 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x7ffc60a8dfe0, 0x7ffc60a8e0a0 } -Shifting token 'a' (0x7ffc60a8dfe0 'a') -0x556fa706f2e0->Object::Object { 0x7ffc60a8dfe0 } -0x7ffc60a8dfe0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8dfe0 } -Entering state 2 -Stack 
now 0 2 -0x7ffc60a8e0c0->Object::Object { 0x556fa706f2e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556fa706f2e0 'a') --> $$ = nterm item (0x7ffc60a8e0c0 'a') -0x556fa706f2e0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8e0c0 } -0x556fa706f2e0->Object::Object { 0x7ffc60a8e0c0 } -0x7ffc60a8e0c0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8e0c0 } -Entering state 11 -Stack now 0 11 -Reading a token -0x7ffc60a8dfbf->Object::Object { 0x556fa706f2e0 } -0x7ffc60a8e0a0->Object::Object { 0x556fa706f2e0, 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'a' (0x7ffc60a8e0a0 'a') -0x7ffc60a8dfe0->Object::Object { 0x556fa706f2e0, 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8dfe0, 0x7ffc60a8e0a0 } -Shifting token 'a' (0x7ffc60a8dfe0 'a') -0x556fa706f300->Object::Object { 0x556fa706f2e0, 0x7ffc60a8dfe0 } -0x7ffc60a8dfe0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfe0 } -Entering state 2 -Stack now 0 11 2 -0x7ffc60a8e0c0->Object::Object { 0x556fa706f2e0, 0x556fa706f300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556fa706f300 'a') --> $$ = nterm item (0x7ffc60a8e0c0 'a') -0x556fa706f300->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8e0c0 } -0x556fa706f300->Object::Object { 0x556fa706f2e0, 0x7ffc60a8e0c0 } -0x7ffc60a8e0c0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8e0c0 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0x7ffc60a8dfbf->Object::Object { 0x556fa706f2e0, 0x556fa706f300 } -0x7ffc60a8e0a0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'a' (0x7ffc60a8e0a0 'a') -0x7ffc60a8dfe0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfe0, 0x7ffc60a8e0a0 } -Shifting token 'a' (0x7ffc60a8dfe0 'a') -0x556fa706f320->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfe0 } -0x7ffc60a8dfe0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfe0 } -Entering state 2 -Stack now 0 11 11 2 -0x7ffc60a8e0c0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556fa706f320 'a') --> $$ = nterm item (0x7ffc60a8e0c0 'a') -0x556fa706f320->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0c0 } -0x556fa706f320->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8e0c0 } -0x7ffc60a8e0c0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0c0 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x7ffc60a8dfbf->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320 } -0x7ffc60a8e0a0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'a' (0x7ffc60a8e0a0 'a') -0x7ffc60a8dfe0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfe0, 0x7ffc60a8e0a0 } -Shifting token 'a' (0x7ffc60a8dfe0 'a') -0x556fa706f340->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfe0 } -0x7ffc60a8dfe0->Object::~Object { 0x556fa706f2e0, 
0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8dfe0 } -Entering state 2 -Stack now 0 11 11 11 2 -0x7ffc60a8e0c0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556fa706f340 'a') --> $$ = nterm item (0x7ffc60a8e0c0 'a') -0x556fa706f340->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8e0c0 } -0x556fa706f340->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0c0 } -0x7ffc60a8e0c0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8e0c0 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x7ffc60a8dfbf->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340 } -0x7ffc60a8e0a0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'p' (0x7ffc60a8e0a0 'p'Exception caught: cleaning lookahead and stack -0x556fa706f340->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8e0a0 } -0x556fa706f320->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0a0 } -0x556fa706f300->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8e0a0 } -0x556fa706f2e0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x7ffc60a8e0a0 } -exception caught: printer -end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffc60a8dfbf->Object::Object { } -0x7ffc60a8e0a0->Object::Object { 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'a' (0x7ffc60a8e0a0 'a') -0x7ffc60a8dfe0->Object::Object { 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x7ffc60a8dfe0, 0x7ffc60a8e0a0 } -Shifting token 'a' (0x7ffc60a8dfe0 'a') -0x556fa706f2e0->Object::Object { 0x7ffc60a8dfe0 } -0x7ffc60a8dfe0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8dfe0 } -Entering state 2 -Stack now 0 2 -0x7ffc60a8e0c0->Object::Object { 0x556fa706f2e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556fa706f2e0 'a') --> $$ = nterm item (0x7ffc60a8e0c0 'a') -0x556fa706f2e0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8e0c0 } -0x556fa706f2e0->Object::Object { 0x7ffc60a8e0c0 } -0x7ffc60a8e0c0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8e0c0 } -Entering state 11 -Stack now 0 11 -Reading a token -0x7ffc60a8dfbf->Object::Object { 0x556fa706f2e0 } -0x7ffc60a8e0a0->Object::Object { 0x556fa706f2e0, 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'a' (0x7ffc60a8e0a0 'a') -0x7ffc60a8dfe0->Object::Object { 0x556fa706f2e0, 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8dfe0, 0x7ffc60a8e0a0 } -Shifting token 'a' (0x7ffc60a8dfe0 'a') -0x556fa706f300->Object::Object { 0x556fa706f2e0, 0x7ffc60a8dfe0 } -0x7ffc60a8dfe0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfe0 } -Entering state 2 -Stack now 0 11 2 -0x7ffc60a8e0c0->Object::Object { 0x556fa706f2e0, 0x556fa706f300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556fa706f300 'a') --> $$ = nterm item (0x7ffc60a8e0c0 'a') -0x556fa706f300->Object::~Object { 0x556fa706f2e0, 
0x556fa706f300, 0x7ffc60a8e0c0 } -0x556fa706f300->Object::Object { 0x556fa706f2e0, 0x7ffc60a8e0c0 } -0x7ffc60a8e0c0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8e0c0 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0x7ffc60a8dfbf->Object::Object { 0x556fa706f2e0, 0x556fa706f300 } -0x7ffc60a8e0a0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'a' (0x7ffc60a8e0a0 'a') -0x7ffc60a8dfe0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfe0, 0x7ffc60a8e0a0 } -Shifting token 'a' (0x7ffc60a8dfe0 'a') -0x556fa706f320->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8dfe0 } -0x7ffc60a8dfe0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfe0 } -Entering state 2 -Stack now 0 11 11 2 -0x7ffc60a8e0c0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556fa706f320 'a') --> $$ = nterm item (0x7ffc60a8e0c0 'a') -0x556fa706f320->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0c0 } -0x556fa706f320->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8e0c0 } -0x7ffc60a8e0c0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0c0 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x7ffc60a8dfbf->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320 } -0x7ffc60a8e0a0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'a' (0x7ffc60a8e0a0 'a') -0x7ffc60a8dfe0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfe0, 0x7ffc60a8e0a0 } -Shifting token 'a' (0x7ffc60a8dfe0 'a') -0x556fa706f340->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8dfe0 } -0x7ffc60a8dfe0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8dfe0 } -Entering state 2 -Stack now 0 11 11 11 2 -0x7ffc60a8e0c0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x556fa706f340 'a') --> $$ = nterm item (0x7ffc60a8e0c0 'a') -0x556fa706f340->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8e0c0 } -0x556fa706f340->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0c0 } -0x7ffc60a8e0c0->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8e0c0 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x7ffc60a8dfbf->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340 } -0x7ffc60a8e0a0->Object::Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8dfbf } -0x7ffc60a8dfbf->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8dfbf, 0x7ffc60a8e0a0 } -Next token is token 'p' (0x7ffc60a8e0a0 'p'Exception caught: cleaning lookahead and stack -0x556fa706f340->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x556fa706f340, 0x7ffc60a8e0a0 } 
-0x556fa706f320->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x556fa706f320, 0x7ffc60a8e0a0 } -0x556fa706f300->Object::~Object { 0x556fa706f2e0, 0x556fa706f300, 0x7ffc60a8e0a0 } -0x556fa706f2e0->Object::~Object { 0x556fa706f2e0, 0x7ffc60a8e0a0 } -0x7ffc60a8e0a0->Object::~Object { 0x7ffc60a8e0a0 } -exception caught: printer -end { } -./c++.at:1362: grep '^exception caught: printer$' stderr stderr: stdout: -./c++.at:571: $here/modern -stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae +./c++.at:573: $here/modern stdout: Modern C++: 201703 +./c++.at:573: $PREPARSER ./list stderr: -./c++.at:571: $PREPARSER ./list -stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Destroy: "0" Destroy: "0" Destroy: 1 @@ -269027,97 +269171,18 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:1362: $PREPARSER ./input aaaaT -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./c++.at:572: $here/modern ======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -./glr-regression.at:2151: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token 'a' () -Shifting token 'a' () -Entering state 1 -Reading a token -Next token is token 'b' () -Shifting token 'b' () -Entering state 3 -Reducing stack 0 by rule 3 (line 30): - $1 = token 'b' () --> $$ = nterm b () -Entering state 4 -Reading a token -Next token is token 'c' () -Shifting token 'c' () -Entering state 6 -Reducing stack 0 by rule 4 (line 31): --> $$ = nterm d () -Entering state 7 -Reading a token -Now at end of input. -Stack 0 Entering state 7 -Now at end of input. -Splitting off stack 1 from 0. -Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. -Stack 1 Entering state 2 -Now at end of input. -Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. -Merging stack 0 into stack 1. -Stack 1 Entering state 2 -Now at end of input. -Removing dead stacks. -Rename stack 1 -> 0. -On stack 0, shifting token "end of file" () -Stack 0 now in state 5 -Ambiguity detected. -Option 1, - start -> - 'a' - b - 'c' - d - -Option 2, - start -> - 'a' - b - 'c' - d - -syntax is ambiguous -Cleanup: popping token "end of file" () -Cleanup: popping unresolved nterm start () -Cleanup: popping nterm d () -Cleanup: popping token 'c' () -Cleanup: popping nterm b () -Cleanup: popping token 'a' () -./glr-regression.at:2151: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -773. 
glr-regression.at:2151: ok -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:567: $here/modern -stdout: Modern C++: 201703 -./c++.at:567: $PREPARSER ./list -stderr: +stdout: +./c++.at:572: $PREPARSER ./list +./c++.at:569: $here/modern stderr: stdout: Destroy: "0" @@ -269141,14 +269206,9 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:569: $here/modern -stdout: -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Modern C++: 201703 ./c++.at:569: $PREPARSER ./list -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Destroy: "0" Destroy: "0" @@ -269171,49 +269231,48 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stdout: -./c++.at:850: $PREPARSER ./input ./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ======== Testing with C++ standard flags: '' ./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: +./c++.at:574: $here/modern stdout: -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in +Modern C++: 201703 +./c++.at:574: $PREPARSER ./list stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:572: $here/modern +./c++.at:571: $here/modern +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stdout: -stderr: Modern C++: 201703 -./c++.at:572: $PREPARSER ./list -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: +./c++.at:571: $PREPARSER ./list stderr: Destroy: 
"0" Destroy: "0" @@ -269236,56 +269295,33 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stdout: -stderr: -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: ./check -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -683. c++.at:1065: ok +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:1361: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i +./c++.at:850: $PREPARSER ./input stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1361: $PREPARSER ./input aaaap -./c++.at:573: $here/modern +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -Modern C++: 201703 -./c++.at:573: $PREPARSER ./list -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap -stderr: +./c++.at:566: $here/modern +stdout: +Modern C++: 202002 +./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -269308,158 +269344,288 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./existing.at:808: $PREPARSER ./input 
+stderr: +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +621. existing.at:808: ok +stderr: +stdout: +stderr: +./c++.at:856: $PREPARSER ./input +stdout: +./c++.at:1362: $PREPARSER ./input aaaas +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +stderr: +exception caught: initial-action +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1065: $PREPARSER ./input < in +./c++.at:1362: $PREPARSER ./input aaaap +stderr: +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap +./c++.at:1065: $PREPARSER ./input < in +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x55f1beea0f40->Object::Object { } -Next token is token 'a' (0x55f1beea0f40 'a') -Shifting token 'a' (0x55f1beea0f40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f1beea0f40 'a') --> $$ = nterm item (0x55f1beea0f40 'a') -Entering state 10 -Stack now 0 10 +0x7ffd840179cf->Object::Object { } +0x7ffd84017ab0->Object::Object { 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'a' (0x7ffd84017ab0 'a') +0x7ffd840179f0->Object::Object { 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x7ffd840179f0, 0x7ffd84017ab0 } +Shifting token 'a' (0x7ffd840179f0 'a') +0x56392fda32e0->Object::Object { 0x7ffd840179f0 } +0x7ffd840179f0->Object::~Object { 0x56392fda32e0, 0x7ffd840179f0 } +Entering state 2 +Stack now 0 2 +0x7ffd84017ad0->Object::Object { 0x56392fda32e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56392fda32e0 'a') +-> $$ = nterm item (0x7ffd84017ad0 'a') +0x56392fda32e0->Object::~Object { 0x56392fda32e0, 0x7ffd84017ad0 } +0x56392fda32e0->Object::Object { 0x7ffd84017ad0 } +0x7ffd84017ad0->Object::~Object { 0x56392fda32e0, 0x7ffd84017ad0 } +Entering state 11 +Stack now 0 11 Reading a token -0x55f1beea0f90->Object::Object { 0x55f1beea0f40 } -Next token is token 'a' (0x55f1beea0f90 'a') -Shifting token 'a' (0x55f1beea0f90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f1beea0f90 'a') --> $$ = nterm item (0x55f1beea0f90 'a') -Entering state 10 -Stack now 0 10 10 +0x7ffd840179cf->Object::Object { 0x56392fda32e0 } +0x7ffd84017ab0->Object::Object { 0x56392fda32e0, 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x56392fda32e0, 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'a' (0x7ffd84017ab0 'a') +0x7ffd840179f0->Object::Object { 0x56392fda32e0, 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x56392fda32e0, 0x7ffd840179f0, 0x7ffd84017ab0 } +Shifting token 'a' (0x7ffd840179f0 'a') +0x56392fda3300->Object::Object { 0x56392fda32e0, 0x7ffd840179f0 } +0x7ffd840179f0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179f0 } 
+Entering state 2 +Stack now 0 11 2 +0x7ffd84017ad0->Object::Object { 0x56392fda32e0, 0x56392fda3300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56392fda3300 'a') +-> $$ = nterm item (0x7ffd84017ad0 'a') +0x56392fda3300->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ad0 } +0x56392fda3300->Object::Object { 0x56392fda32e0, 0x7ffd84017ad0 } +0x7ffd84017ad0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ad0 } +Entering state 11 +Stack now 0 11 11 Reading a token -0x55f1beea0fe0->Object::Object { 0x55f1beea0f40, 0x55f1beea0f90 } -Next token is token 'a' (0x55f1beea0fe0 'a') -Shifting token 'a' (0x55f1beea0fe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f1beea0fe0 'a') --> $$ = nterm item (0x55f1beea0fe0 'a') -Entering state 10 -Stack now 0 10 10 10 +0x7ffd840179cf->Object::Object { 0x56392fda32e0, 0x56392fda3300 } +0x7ffd84017ab0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'a' (0x7ffd84017ab0 'a') +0x7ffd840179f0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179f0, 0x7ffd84017ab0 } +Shifting token 'a' (0x7ffd840179f0 'a') +0x56392fda3320->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179f0 } +0x7ffd840179f0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179f0 } +Entering state 2 +Stack now 0 11 11 2 +0x7ffd84017ad0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56392fda3320 'a') +-> $$ = nterm item (0x7ffd84017ad0 'a') +0x56392fda3320->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ad0 } +0x56392fda3320->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ad0 } +0x7ffd84017ad0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ad0 } +Entering state 11 +Stack now 0 11 11 11 Reading a token -0x55f1beea1030->Object::Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0 } -Next token is token 'a' (0x55f1beea1030 'a') -Shifting token 'a' (0x55f1beea1030 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f1beea1030 'a') --> $$ = nterm item (0x55f1beea1030 'a') -Entering state 10 -Stack now 0 10 10 10 10 +0x7ffd840179cf->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320 } +0x7ffd84017ab0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'a' (0x7ffd84017ab0 'a') +0x7ffd840179f0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179f0, 0x7ffd84017ab0 } +Shifting token 'a' (0x7ffd840179f0 'a') +0x56392fda3340->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179f0 } +0x7ffd840179f0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd840179f0 } +Entering state 2 +Stack now 0 11 11 11 2 +0x7ffd84017ad0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56392fda3340 'a') +-> $$ 
= nterm item (0x7ffd84017ad0 'a') +0x56392fda3340->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd84017ad0 } +0x56392fda3340->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ad0 } +0x7ffd84017ad0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd84017ad0 } +Entering state 11 +Stack now 0 11 11 11 11 Reading a token -0x55f1beea1080->Object::Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0, 0x55f1beea1030 } -Next token is token 'p' (0x55f1beea1080 'p'Exception caught: cleaning lookahead and stack -0x55f1beea1080->Object::~Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0, 0x55f1beea1030, 0x55f1beea1080 } -0x55f1beea1030->Object::~Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0, 0x55f1beea1030 } -0x55f1beea0fe0->Object::~Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0 } -0x55f1beea0f90->Object::~Object { 0x55f1beea0f40, 0x55f1beea0f90 } -0x55f1beea0f40->Object::~Object { 0x55f1beea0f40 } +0x7ffd840179cf->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340 } +0x7ffd84017ab0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'p' (0x7ffd84017ab0 'p'Exception caught: cleaning lookahead and stack +0x56392fda3340->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd84017ab0 } +0x56392fda3320->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ab0 } +0x56392fda3300->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ab0 } +0x56392fda32e0->Object::~Object { 0x56392fda32e0, 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x7ffd84017ab0 } exception caught: printer end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x55f1beea0f40->Object::Object { } -Next token is token 'a' (0x55f1beea0f40 'a') -Shifting token 'a' (0x55f1beea0f40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f1beea0f40 'a') --> $$ = nterm item (0x55f1beea0f40 'a') -Entering state 10 -Stack now 0 10 +0x7ffd840179cf->Object::Object { } +0x7ffd84017ab0->Object::Object { 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'a' (0x7ffd84017ab0 'a') +0x7ffd840179f0->Object::Object { 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x7ffd840179f0, 0x7ffd84017ab0 } +Shifting token 'a' (0x7ffd840179f0 'a') +0x56392fda32e0->Object::Object { 0x7ffd840179f0 } +0x7ffd840179f0->Object::~Object { 0x56392fda32e0, 0x7ffd840179f0 } +Entering state 2 +Stack now 0 2 +0x7ffd84017ad0->Object::Object { 0x56392fda32e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56392fda32e0 'a') +-> $$ = nterm item (0x7ffd84017ad0 'a') +0x56392fda32e0->Object::~Object { 0x56392fda32e0, 0x7ffd84017ad0 } +0x56392fda32e0->Object::Object { 0x7ffd84017ad0 } +0x7ffd84017ad0->Object::~Object { 0x56392fda32e0, 0x7ffd84017ad0 } +Entering state 11 
+Stack now 0 11 Reading a token -0x55f1beea0f90->Object::Object { 0x55f1beea0f40 } -Next token is token 'a' (0x55f1beea0f90 'a') -Shifting token 'a' (0x55f1beea0f90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f1beea0f90 'a') --> $$ = nterm item (0x55f1beea0f90 'a') -Entering state 10 -Stack now 0 10 10 +0x7ffd840179cf->Object::Object { 0x56392fda32e0 } +0x7ffd84017ab0->Object::Object { 0x56392fda32e0, 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x56392fda32e0, 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'a' (0x7ffd84017ab0 'a') +0x7ffd840179f0->Object::Object { 0x56392fda32e0, 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x56392fda32e0, 0x7ffd840179f0, 0x7ffd84017ab0 } +Shifting token 'a' (0x7ffd840179f0 'a') +0x56392fda3300->Object::Object { 0x56392fda32e0, 0x7ffd840179f0 } +0x7ffd840179f0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179f0 } +Entering state 2 +Stack now 0 11 2 +0x7ffd84017ad0->Object::Object { 0x56392fda32e0, 0x56392fda3300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56392fda3300 'a') +-> $$ = nterm item (0x7ffd84017ad0 'a') +0x56392fda3300->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ad0 } +0x56392fda3300->Object::Object { 0x56392fda32e0, 0x7ffd84017ad0 } +0x7ffd84017ad0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ad0 } +Entering state 11 +Stack now 0 11 11 Reading a token -0x55f1beea0fe0->Object::Object { 0x55f1beea0f40, 0x55f1beea0f90 } -Next token is token 'a' (0x55f1beea0fe0 'a') -Shifting token 'a' (0x55f1beea0fe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f1beea0fe0 'a') --> $$ = nterm item (0x55f1beea0fe0 'a') -Entering state 10 -Stack now 0 10 10 10 +0x7ffd840179cf->Object::Object { 0x56392fda32e0, 0x56392fda3300 } +0x7ffd84017ab0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'a' (0x7ffd84017ab0 'a') +0x7ffd840179f0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179f0, 0x7ffd84017ab0 } +Shifting token 'a' (0x7ffd840179f0 'a') +0x56392fda3320->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd840179f0 } +0x7ffd840179f0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179f0 } +Entering state 2 +Stack now 0 11 11 2 +0x7ffd84017ad0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56392fda3320 'a') +-> $$ = nterm item (0x7ffd84017ad0 'a') +0x56392fda3320->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ad0 } +0x56392fda3320->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ad0 } +0x7ffd84017ad0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ad0 } +Entering state 11 +Stack now 0 11 11 11 Reading a token -0x55f1beea1030->Object::Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0 } -Next token is token 'a' (0x55f1beea1030 'a') -Shifting token 'a' (0x55f1beea1030 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55f1beea1030 'a') --> $$ = nterm item (0x55f1beea1030 'a') -Entering state 10 -Stack now 0 10 10 10 10 +0x7ffd840179cf->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320 } 
+0x7ffd84017ab0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'a' (0x7ffd84017ab0 'a') +0x7ffd840179f0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179f0, 0x7ffd84017ab0 } +Shifting token 'a' (0x7ffd840179f0 'a') +0x56392fda3340->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd840179f0 } +0x7ffd840179f0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd840179f0 } +Entering state 2 +Stack now 0 11 11 11 2 +0x7ffd84017ad0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56392fda3340 'a') +-> $$ = nterm item (0x7ffd84017ad0 'a') +0x56392fda3340->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd84017ad0 } +0x56392fda3340->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ad0 } +0x7ffd84017ad0->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd84017ad0 } +Entering state 11 +Stack now 0 11 11 11 11 Reading a token -0x55f1beea1080->Object::Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0, 0x55f1beea1030 } -Next token is token 'p' (0x55f1beea1080 'p'Exception caught: cleaning lookahead and stack -0x55f1beea1080->Object::~Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0, 0x55f1beea1030, 0x55f1beea1080 } -0x55f1beea1030->Object::~Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0, 0x55f1beea1030 } -0x55f1beea0fe0->Object::~Object { 0x55f1beea0f40, 0x55f1beea0f90, 0x55f1beea0fe0 } -0x55f1beea0f90->Object::~Object { 0x55f1beea0f40, 0x55f1beea0f90 } -0x55f1beea0f40->Object::~Object { 0x55f1beea0f40 } +0x7ffd840179cf->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340 } +0x7ffd84017ab0->Object::Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd840179cf } +0x7ffd840179cf->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd840179cf, 0x7ffd84017ab0 } +Next token is token 'p' (0x7ffd84017ab0 'p'Exception caught: cleaning lookahead and stack +0x56392fda3340->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x56392fda3340, 0x7ffd84017ab0 } +0x56392fda3320->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x56392fda3320, 0x7ffd84017ab0 } +0x56392fda3300->Object::~Object { 0x56392fda32e0, 0x56392fda3300, 0x7ffd84017ab0 } +0x56392fda32e0->Object::~Object { 0x56392fda32e0, 0x7ffd84017ab0 } +0x7ffd84017ab0->Object::~Object { 0x7ffd84017ab0 } exception caught: printer end { } -./c++.at:1361: grep '^exception caught: printer$' stderr -======== Testing with C++ standard flags: '' +./c++.at:1362: grep '^exception caught: printer$' stderr +./c++.at:1065: $PREPARSER ./input < in stdout: exception caught: printer -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:1362: $PREPARSER ./input aaaae +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch 
for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +683. c++.at:1065: ok +./c++.at:1362: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaT -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -686. c++.at:1361: ok +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT stderr: -stdout: -./c++.at:856: $PREPARSER ./input +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:857: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:574: $here/modern +./c++.at:568: $here/modern stdout: -Modern C++: 201703 -./c++.at:574: $PREPARSER ./list +Modern C++: 202002 +./c++.at:568: $PREPARSER ./list stderr: Destroy: "" Destroy: "" @@ -269482,20 +269648,13 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:1064: $PREPARSER ./input < in @@ -269513,13 +269672,38 @@ stderr: error: invalid character ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: 682. 
c++.at:1064: ok -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:567: $here/modern +stdout: +Modern C++: 202002 +./c++.at:567: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./c++.at:1363: $PREPARSER ./input aaaas @@ -269528,259 +269712,222 @@ ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaal stderr: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS exception caught: yylex ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./c++.at:1363: $PREPARSER ./input i stderr: -stdout: -./c++.at:568: $here/modern exception caught: initial-action -stdout: -Modern C++: 202002 -./c++.at:568: $PREPARSER ./list ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./c++.at:1363: $PREPARSER ./input aaaap -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input --debug aaaap -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffff172567f->Object::Object { } -0x7ffff1725760->Object::Object { 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'a' (0x7ffff1725760 'a') -0x7ffff17256a0->Object::Object { 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x7ffff17256a0, 0x7ffff1725760 } -Shifting token 'a' (0x7ffff17256a0 'a') -0x55ba7dc382e0->Object::Object { 0x7ffff17256a0 } -0x7ffff17256a0->Object::~Object { 0x55ba7dc382e0, 0x7ffff17256a0 } +0x7ffe6cceb6df->Object::Object { } +0x7ffe6cceb7c0->Object::Object { 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next token is token 'a' (0x7ffe6cceb7c0 'a') +0x7ffe6cceb700->Object::Object { 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x7ffe6cceb700, 0x7ffe6cceb7c0 } +Shifting token 'a' (0x7ffe6cceb700 'a') +0x564da8f812e0->Object::Object { 0x7ffe6cceb700 } +0x7ffe6cceb700->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb700 } Entering state 1 Stack now 0 1 -0x7ffff1725780->Object::Object { 0x55ba7dc382e0 } +0x7ffe6cceb7e0->Object::Object { 0x564da8f812e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ba7dc382e0 'a') --> $$ = nterm 
item (0x7ffff1725780 'a') -0x55ba7dc382e0->Object::~Object { 0x55ba7dc382e0, 0x7ffff1725780 } -0x55ba7dc382e0->Object::Object { 0x7ffff1725780 } -0x7ffff1725780->Object::~Object { 0x55ba7dc382e0, 0x7ffff1725780 } + $1 = token 'a' (0x564da8f812e0 'a') +-> $$ = nterm item (0x7ffe6cceb7e0 'a') +0x564da8f812e0->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb7e0 } +0x564da8f812e0->Object::Object { 0x7ffe6cceb7e0 } +0x7ffe6cceb7e0->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb7e0 } Entering state 10 Stack now 0 10 Reading a token -0x7ffff172567f->Object::Object { 0x55ba7dc382e0 } -0x7ffff1725760->Object::Object { 0x55ba7dc382e0, 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x55ba7dc382e0, 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'a' (0x7ffff1725760 'a') -0x7ffff17256a0->Object::Object { 0x55ba7dc382e0, 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x55ba7dc382e0, 0x7ffff17256a0, 0x7ffff1725760 } -Shifting token 'a' (0x7ffff17256a0 'a') -0x55ba7dc38300->Object::Object { 0x55ba7dc382e0, 0x7ffff17256a0 } -0x7ffff17256a0->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff17256a0 } +0x7ffe6cceb6df->Object::Object { 0x564da8f812e0 } +0x7ffe6cceb7c0->Object::Object { 0x564da8f812e0, 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next token is token 'a' (0x7ffe6cceb7c0 'a') +0x7ffe6cceb700->Object::Object { 0x564da8f812e0, 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb700, 0x7ffe6cceb7c0 } +Shifting token 'a' (0x7ffe6cceb700 'a') +0x564da8f81300->Object::Object { 0x564da8f812e0, 0x7ffe6cceb700 } +0x7ffe6cceb700->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb700 } Entering state 1 Stack now 0 10 1 -0x7ffff1725780->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300 } +0x7ffe6cceb7e0->Object::Object { 0x564da8f812e0, 0x564da8f81300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ba7dc38300 'a') --> $$ = nterm item (0x7ffff1725780 'a') -0x55ba7dc38300->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725780 } -0x55ba7dc38300->Object::Object { 0x55ba7dc382e0, 0x7ffff1725780 } -0x7ffff1725780->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725780 } + $1 = token 'a' (0x564da8f81300 'a') +-> $$ = nterm item (0x7ffe6cceb7e0 'a') +0x564da8f81300->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7e0 } +0x564da8f81300->Object::Object { 0x564da8f812e0, 0x7ffe6cceb7e0 } +0x7ffe6cceb7e0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7e0 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffff172567f->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300 } -0x7ffff1725760->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'a' (0x7ffff1725760 'a') -0x7ffff17256a0->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff17256a0, 0x7ffff1725760 } -Shifting token 'a' (0x7ffff17256a0 'a') -0x55ba7dc38320->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff17256a0 } -0x7ffff17256a0->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff17256a0 } +0x7ffe6cceb6df->Object::Object { 0x564da8f812e0, 0x564da8f81300 } +0x7ffe6cceb7c0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next 
token is token 'a' (0x7ffe6cceb7c0 'a') +0x7ffe6cceb700->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb700, 0x7ffe6cceb7c0 } +Shifting token 'a' (0x7ffe6cceb700 'a') +0x564da8f81320->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb700 } +0x7ffe6cceb700->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb700 } Entering state 1 Stack now 0 10 10 1 -0x7ffff1725780->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320 } +0x7ffe6cceb7e0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ba7dc38320 'a') --> $$ = nterm item (0x7ffff1725780 'a') -0x55ba7dc38320->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725780 } -0x55ba7dc38320->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725780 } -0x7ffff1725780->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725780 } + $1 = token 'a' (0x564da8f81320 'a') +-> $$ = nterm item (0x7ffe6cceb7e0 'a') +0x564da8f81320->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7e0 } +0x564da8f81320->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7e0 } +0x7ffe6cceb7e0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7e0 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffff172567f->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320 } -0x7ffff1725760->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'a' (0x7ffff1725760 'a') -0x7ffff17256a0->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff17256a0, 0x7ffff1725760 } -Shifting token 'a' (0x7ffff17256a0 'a') -0x55ba7dc38340->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff17256a0 } -0x7ffff17256a0->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff17256a0 } +0x7ffe6cceb6df->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320 } +0x7ffe6cceb7c0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next token is token 'a' (0x7ffe6cceb7c0 'a') +0x7ffe6cceb700->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb700, 0x7ffe6cceb7c0 } +Shifting token 'a' (0x7ffe6cceb700 'a') +0x564da8f81340->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb700 } +0x7ffe6cceb700->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb700 } Entering state 1 Stack now 0 10 10 10 1 -0x7ffff1725780->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340 } +0x7ffe6cceb7e0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ba7dc38340 'a') --> $$ = nterm item (0x7ffff1725780 'a') -0x55ba7dc38340->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 
0x7ffff1725780 } -0x55ba7dc38340->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725780 } -0x7ffff1725780->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff1725780 } + $1 = token 'a' (0x564da8f81340 'a') +-> $$ = nterm item (0x7ffe6cceb7e0 'a') +0x564da8f81340->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb7e0 } +0x564da8f81340->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7e0 } +0x7ffe6cceb7e0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb7e0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffff172567f->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340 } -0x7ffff1725760->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'p' (0x7ffff1725760 'p'Exception caught: cleaning lookahead and stack -0x55ba7dc38340->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff1725760 } -0x55ba7dc38320->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725760 } -0x55ba7dc38300->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725760 } -0x55ba7dc382e0->Object::~Object { 0x55ba7dc382e0, 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x7ffff1725760 } +0x7ffe6cceb6df->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340 } +0x7ffe6cceb7c0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next token is token 'p' (0x7ffe6cceb7c0 'p'Exception caught: cleaning lookahead and stack +0x564da8f81340->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb7c0 } +0x564da8f81320->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7c0 } +0x564da8f81300->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7c0 } +0x564da8f812e0->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x7ffe6cceb7c0 } exception caught: printer end { } -stdout: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:857: $PREPARSER ./input -stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffff172567f->Object::Object { } -0x7ffff1725760->Object::Object { 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'a' (0x7ffff1725760 'a') -0x7ffff17256a0->Object::Object { 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x7ffff17256a0, 0x7ffff1725760 } -Shifting token 'a' (0x7ffff17256a0 'a') -0x55ba7dc382e0->Object::Object { 0x7ffff17256a0 } -0x7ffff17256a0->Object::~Object { 0x55ba7dc382e0, 0x7ffff17256a0 } +0x7ffe6cceb6df->Object::Object { } +0x7ffe6cceb7c0->Object::Object { 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next token is token 'a' (0x7ffe6cceb7c0 'a') +0x7ffe6cceb700->Object::Object { 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x7ffe6cceb700, 0x7ffe6cceb7c0 } +Shifting token 'a' (0x7ffe6cceb700 'a') +0x564da8f812e0->Object::Object { 0x7ffe6cceb700 } 
+0x7ffe6cceb700->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb700 } Entering state 1 Stack now 0 1 -0x7ffff1725780->Object::Object { 0x55ba7dc382e0 } +0x7ffe6cceb7e0->Object::Object { 0x564da8f812e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ba7dc382e0 'a') --> $$ = nterm item (0x7ffff1725780 'a') -0x55ba7dc382e0->Object::~Object { 0x55ba7dc382e0, 0x7ffff1725780 } -0x55ba7dc382e0->Object::Object { 0x7ffff1725780 } -0x7ffff1725780->Object::~Object { 0x55ba7dc382e0, 0x7ffff1725780 } + $1 = token 'a' (0x564da8f812e0 'a') +-> $$ = nterm item (0x7ffe6cceb7e0 'a') +0x564da8f812e0->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb7e0 } +0x564da8f812e0->Object::Object { 0x7ffe6cceb7e0 } +0x7ffe6cceb7e0->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb7e0 } Entering state 10 Stack now 0 10 Reading a token -0x7ffff172567f->Object::Object { 0x55ba7dc382e0 } -0x7ffff1725760->Object::Object { 0x55ba7dc382e0, 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x55ba7dc382e0, 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'a' (0x7ffff1725760 'a') -0x7ffff17256a0->Object::Object { 0x55ba7dc382e0, 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x55ba7dc382e0, 0x7ffff17256a0, 0x7ffff1725760 } -Shifting token 'a' (0x7ffff17256a0 'a') -0x55ba7dc38300->Object::Object { 0x55ba7dc382e0, 0x7ffff17256a0 } -0x7ffff17256a0->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff17256a0 } +0x7ffe6cceb6df->Object::Object { 0x564da8f812e0 } +0x7ffe6cceb7c0->Object::Object { 0x564da8f812e0, 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next token is token 'a' (0x7ffe6cceb7c0 'a') +0x7ffe6cceb700->Object::Object { 0x564da8f812e0, 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb700, 0x7ffe6cceb7c0 } +Shifting token 'a' (0x7ffe6cceb700 'a') +0x564da8f81300->Object::Object { 0x564da8f812e0, 0x7ffe6cceb700 } +0x7ffe6cceb700->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb700 } Entering state 1 Stack now 0 10 1 -0x7ffff1725780->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300 } +0x7ffe6cceb7e0->Object::Object { 0x564da8f812e0, 0x564da8f81300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ba7dc38300 'a') --> $$ = nterm item (0x7ffff1725780 'a') -0x55ba7dc38300->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725780 } -0x55ba7dc38300->Object::Object { 0x55ba7dc382e0, 0x7ffff1725780 } -0x7ffff1725780->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725780 } + $1 = token 'a' (0x564da8f81300 'a') +-> $$ = nterm item (0x7ffe6cceb7e0 'a') +0x564da8f81300->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7e0 } +0x564da8f81300->Object::Object { 0x564da8f812e0, 0x7ffe6cceb7e0 } +0x7ffe6cceb7e0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7e0 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffff172567f->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300 } -0x7ffff1725760->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'a' (0x7ffff1725760 'a') -0x7ffff17256a0->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff17256a0, 0x7ffff1725760 } -Shifting token 'a' (0x7ffff17256a0 'a') -0x55ba7dc38320->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff17256a0 } -0x7ffff17256a0->Object::~Object { 0x55ba7dc382e0, 
0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff17256a0 } +0x7ffe6cceb6df->Object::Object { 0x564da8f812e0, 0x564da8f81300 } +0x7ffe6cceb7c0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next token is token 'a' (0x7ffe6cceb7c0 'a') +0x7ffe6cceb700->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb700, 0x7ffe6cceb7c0 } +Shifting token 'a' (0x7ffe6cceb700 'a') +0x564da8f81320->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb700 } +0x7ffe6cceb700->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb700 } Entering state 1 Stack now 0 10 10 1 -0x7ffff1725780->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320 } +0x7ffe6cceb7e0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ba7dc38320 'a') --> $$ = nterm item (0x7ffff1725780 'a') -0x55ba7dc38320->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725780 } -0x55ba7dc38320->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725780 } -0x7ffff1725780->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725780 } + $1 = token 'a' (0x564da8f81320 'a') +-> $$ = nterm item (0x7ffe6cceb7e0 'a') +0x564da8f81320->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7e0 } +0x564da8f81320->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7e0 } +0x7ffe6cceb7e0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7e0 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffff172567f->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320 } -0x7ffff1725760->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'a' (0x7ffff1725760 'a') -0x7ffff17256a0->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff17256a0, 0x7ffff1725760 } -Shifting token 'a' (0x7ffff17256a0 'a') -0x55ba7dc38340->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff17256a0 } -0x7ffff17256a0->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff17256a0 } +0x7ffe6cceb6df->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320 } +0x7ffe6cceb7c0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next token is token 'a' (0x7ffe6cceb7c0 'a') +0x7ffe6cceb700->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb700, 0x7ffe6cceb7c0 } +Shifting token 'a' (0x7ffe6cceb700 'a') +0x564da8f81340->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb700 } +0x7ffe6cceb700->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb700 } Entering state 1 Stack now 0 10 10 10 1 -0x7ffff1725780->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340 } 
+0x7ffe6cceb7e0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ba7dc38340 'a') --> $$ = nterm item (0x7ffff1725780 'a') -0x55ba7dc38340->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff1725780 } -0x55ba7dc38340->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725780 } -0x7ffff1725780->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff1725780 } + $1 = token 'a' (0x564da8f81340 'a') +-> $$ = nterm item (0x7ffe6cceb7e0 'a') +0x564da8f81340->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb7e0 } +0x564da8f81340->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7e0 } +0x7ffe6cceb7e0->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb7e0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffff172567f->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340 } -0x7ffff1725760->Object::Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff172567f } -0x7ffff172567f->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff172567f, 0x7ffff1725760 } -Next token is token 'p' (0x7ffff1725760 'p'Exception caught: cleaning lookahead and stack -0x55ba7dc38340->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x55ba7dc38340, 0x7ffff1725760 } -0x55ba7dc38320->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x55ba7dc38320, 0x7ffff1725760 } -0x55ba7dc38300->Object::~Object { 0x55ba7dc382e0, 0x55ba7dc38300, 0x7ffff1725760 } -0x55ba7dc382e0->Object::~Object { 0x55ba7dc382e0, 0x7ffff1725760 } -0x7ffff1725760->Object::~Object { 0x7ffff1725760 } +0x7ffe6cceb6df->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340 } +0x7ffe6cceb7c0->Object::Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb6df } +0x7ffe6cceb6df->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb6df, 0x7ffe6cceb7c0 } +Next token is token 'p' (0x7ffe6cceb7c0 'p'Exception caught: cleaning lookahead and stack +0x564da8f81340->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x564da8f81340, 0x7ffe6cceb7c0 } +0x564da8f81320->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x564da8f81320, 0x7ffe6cceb7c0 } +0x564da8f81300->Object::~Object { 0x564da8f812e0, 0x564da8f81300, 0x7ffe6cceb7c0 } +0x564da8f812e0->Object::~Object { 0x564da8f812e0, 0x7ffe6cceb7c0 } +0x7ffe6cceb7c0->Object::~Object { 0x7ffe6cceb7c0 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: exception caught: printer ./c++.at:1363: $PREPARSER ./input aaaae -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -269794,326 +269941,210 @@ ./c++.at:1363: $PREPARSER ./input aaaaR stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc 
$LIBS stderr: stdout: -./c++.at:1362: $PREPARSER ./input aaaas +./c++.at:570: $here/modern +stdout: +Modern C++: 202002 +./c++.at:570: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input aaaas stderr: exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal stderr: exception caught: yylex -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input i +stderr: +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff1d5a282f->Object::Object { } -0x7fff1d5a2910->Object::Object { 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'a' (0x7fff1d5a2910 'a') -0x7fff1d5a2850->Object::Object { 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x7fff1d5a2850, 0x7fff1d5a2910 } -Shifting token 'a' (0x7fff1d5a2850 'a') -0x55ef12f912e0->Object::Object { 0x7fff1d5a2850 } -0x7fff1d5a2850->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2850 } -Entering state 2 -Stack now 0 2 -0x7fff1d5a2930->Object::Object { 0x55ef12f912e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ef12f912e0 'a') --> $$ = nterm item (0x7fff1d5a2930 'a') -0x55ef12f912e0->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2930 } -0x55ef12f912e0->Object::Object { 0x7fff1d5a2930 } -0x7fff1d5a2930->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2930 } -Entering state 11 -Stack now 0 11 +0x5578cd6f2f40->Object::Object { } +Next token is token 'a' (0x5578cd6f2f40 'a') +Shifting token 'a' (0x5578cd6f2f40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5578cd6f2f40 'a') +-> $$ = nterm item (0x5578cd6f2f40 'a') +Entering state 10 +Stack now 0 10 Reading a token -0x7fff1d5a282f->Object::Object { 0x55ef12f912e0 } -0x7fff1d5a2910->Object::Object { 
0x55ef12f912e0, 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'a' (0x7fff1d5a2910 'a') -0x7fff1d5a2850->Object::Object { 0x55ef12f912e0, 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2850, 0x7fff1d5a2910 } -Shifting token 'a' (0x7fff1d5a2850 'a') -0x55ef12f91300->Object::Object { 0x55ef12f912e0, 0x7fff1d5a2850 } -0x7fff1d5a2850->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2850 } -Entering state 2 -Stack now 0 11 2 -0x7fff1d5a2930->Object::Object { 0x55ef12f912e0, 0x55ef12f91300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ef12f91300 'a') --> $$ = nterm item (0x7fff1d5a2930 'a') -0x55ef12f91300->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2930 } -0x55ef12f91300->Object::Object { 0x55ef12f912e0, 0x7fff1d5a2930 } -0x7fff1d5a2930->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2930 } -Entering state 11 -Stack now 0 11 11 +0x5578cd6f2f90->Object::Object { 0x5578cd6f2f40 } +Next token is token 'a' (0x5578cd6f2f90 'a') +Shifting token 'a' (0x5578cd6f2f90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5578cd6f2f90 'a') +-> $$ = nterm item (0x5578cd6f2f90 'a') +Entering state 10 +Stack now 0 10 10 Reading a token -0x7fff1d5a282f->Object::Object { 0x55ef12f912e0, 0x55ef12f91300 } -0x7fff1d5a2910->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'a' (0x7fff1d5a2910 'a') -0x7fff1d5a2850->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2850, 0x7fff1d5a2910 } -Shifting token 'a' (0x7fff1d5a2850 'a') -0x55ef12f91320->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2850 } -0x7fff1d5a2850->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2850 } -Entering state 2 -Stack now 0 11 11 2 -0x7fff1d5a2930->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ef12f91320 'a') --> $$ = nterm item (0x7fff1d5a2930 'a') -0x55ef12f91320->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2930 } -0x55ef12f91320->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2930 } -0x7fff1d5a2930->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2930 } -Entering state 11 -Stack now 0 11 11 11 +0x5578cd6f2fe0->Object::Object { 0x5578cd6f2f40, 0x5578cd6f2f90 } +Next token is token 'a' (0x5578cd6f2fe0 'a') +Shifting token 'a' (0x5578cd6f2fe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5578cd6f2fe0 'a') +-> $$ = nterm item (0x5578cd6f2fe0 'a') +Entering state 10 +Stack now 0 10 10 10 Reading a token -0x7fff1d5a282f->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320 } -0x7fff1d5a2910->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'a' (0x7fff1d5a2910 'a') -0x7fff1d5a2850->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2850, 0x7fff1d5a2910 } -Shifting token 
'a' (0x7fff1d5a2850 'a') -0x55ef12f91340->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2850 } -0x7fff1d5a2850->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a2850 } -Entering state 2 -Stack now 0 11 11 11 2 -0x7fff1d5a2930->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ef12f91340 'a') --> $$ = nterm item (0x7fff1d5a2930 'a') -0x55ef12f91340->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a2930 } -0x55ef12f91340->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2930 } -0x7fff1d5a2930->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a2930 } -Entering state 11 -Stack now 0 11 11 11 11 +0x5578cd6f3030->Object::Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0 } +Next token is token 'a' (0x5578cd6f3030 'a') +Shifting token 'a' (0x5578cd6f3030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5578cd6f3030 'a') +-> $$ = nterm item (0x5578cd6f3030 'a') +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -0x7fff1d5a282f->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340 } -0x7fff1d5a2910->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'p' (0x7fff1d5a2910 'p'Exception caught: cleaning lookahead and stack -0x55ef12f91340->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a2910 } -0x55ef12f91320->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2910 } -0x55ef12f91300->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2910 } -0x55ef12f912e0->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x7fff1d5a2910 } +0x5578cd6f3080->Object::Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0, 0x5578cd6f3030 } +Next token is token 'p' (0x5578cd6f3080 'p'Exception caught: cleaning lookahead and stack +0x5578cd6f3080->Object::~Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0, 0x5578cd6f3030, 0x5578cd6f3080 } +0x5578cd6f3030->Object::~Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0, 0x5578cd6f3030 } +0x5578cd6f2fe0->Object::~Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0 } +0x5578cd6f2f90->Object::~Object { 0x5578cd6f2f40, 0x5578cd6f2f90 } +0x5578cd6f2f40->Object::~Object { 0x5578cd6f2f40 } exception caught: printer end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff1d5a282f->Object::Object { } -0x7fff1d5a2910->Object::Object { 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'a' (0x7fff1d5a2910 'a') -0x7fff1d5a2850->Object::Object { 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x7fff1d5a2850, 0x7fff1d5a2910 } -Shifting token 'a' (0x7fff1d5a2850 'a') -0x55ef12f912e0->Object::Object { 0x7fff1d5a2850 } -0x7fff1d5a2850->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2850 } -Entering state 2 -Stack now 0 2 -0x7fff1d5a2930->Object::Object { 
0x55ef12f912e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ef12f912e0 'a') --> $$ = nterm item (0x7fff1d5a2930 'a') -0x55ef12f912e0->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2930 } -0x55ef12f912e0->Object::Object { 0x7fff1d5a2930 } -0x7fff1d5a2930->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2930 } -Entering state 11 -Stack now 0 11 +0x5578cd6f2f40->Object::Object { } +Next token is token 'a' (0x5578cd6f2f40 'a') +Shifting token 'a' (0x5578cd6f2f40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5578cd6f2f40 'a') +-> $$ = nterm item (0x5578cd6f2f40 'a') +Entering state 10 +Stack now 0 10 Reading a token -0x7fff1d5a282f->Object::Object { 0x55ef12f912e0 } -0x7fff1d5a2910->Object::Object { 0x55ef12f912e0, 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'a' (0x7fff1d5a2910 'a') -0x7fff1d5a2850->Object::Object { 0x55ef12f912e0, 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2850, 0x7fff1d5a2910 } -Shifting token 'a' (0x7fff1d5a2850 'a') -0x55ef12f91300->Object::Object { 0x55ef12f912e0, 0x7fff1d5a2850 } -0x7fff1d5a2850->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2850 } -Entering state 2 -Stack now 0 11 2 -0x7fff1d5a2930->Object::Object { 0x55ef12f912e0, 0x55ef12f91300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ef12f91300 'a') --> $$ = nterm item (0x7fff1d5a2930 'a') -0x55ef12f91300->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2930 } -0x55ef12f91300->Object::Object { 0x55ef12f912e0, 0x7fff1d5a2930 } -0x7fff1d5a2930->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2930 } -Entering state 11 -Stack now 0 11 11 +0x5578cd6f2f90->Object::Object { 0x5578cd6f2f40 } +Next token is token 'a' (0x5578cd6f2f90 'a') +Shifting token 'a' (0x5578cd6f2f90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5578cd6f2f90 'a') +-> $$ = nterm item (0x5578cd6f2f90 'a') +Entering state 10 +Stack now 0 10 10 Reading a token -0x7fff1d5a282f->Object::Object { 0x55ef12f912e0, 0x55ef12f91300 } -0x7fff1d5a2910->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'a' (0x7fff1d5a2910 'a') -0x7fff1d5a2850->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2850, 0x7fff1d5a2910 } -Shifting token 'a' (0x7fff1d5a2850 'a') -0x55ef12f91320->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2850 } -0x7fff1d5a2850->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2850 } -Entering state 2 -Stack now 0 11 11 2 -0x7fff1d5a2930->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ef12f91320 'a') --> $$ = nterm item (0x7fff1d5a2930 'a') -0x55ef12f91320->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2930 } -0x55ef12f91320->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2930 } -0x7fff1d5a2930->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2930 } -Entering state 11 -Stack now 0 11 11 11 +0x5578cd6f2fe0->Object::Object { 0x5578cd6f2f40, 0x5578cd6f2f90 } +Next token is token 'a' (0x5578cd6f2fe0 'a') +Shifting token 'a' (0x5578cd6f2fe0 'a') +Entering state 1 +Stack 
now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5578cd6f2fe0 'a') +-> $$ = nterm item (0x5578cd6f2fe0 'a') +Entering state 10 +Stack now 0 10 10 10 Reading a token -0x7fff1d5a282f->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320 } -0x7fff1d5a2910->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'a' (0x7fff1d5a2910 'a') -0x7fff1d5a2850->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2850, 0x7fff1d5a2910 } -Shifting token 'a' (0x7fff1d5a2850 'a') -0x55ef12f91340->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2850 } -0x7fff1d5a2850->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a2850 } -Entering state 2 -Stack now 0 11 11 11 2 -0x7fff1d5a2930->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55ef12f91340 'a') --> $$ = nterm item (0x7fff1d5a2930 'a') -0x55ef12f91340->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a2930 } -0x55ef12f91340->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2930 } -0x7fff1d5a2930->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a2930 } -Entering state 11 -Stack now 0 11 11 11 11 +0x5578cd6f3030->Object::Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0 } +Next token is token 'a' (0x5578cd6f3030 'a') +Shifting token 'a' (0x5578cd6f3030 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5578cd6f3030 'a') +-> $$ = nterm item (0x5578cd6f3030 'a') +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -0x7fff1d5a282f->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340 } -0x7fff1d5a2910->Object::Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a282f } -0x7fff1d5a282f->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a282f, 0x7fff1d5a2910 } -Next token is token 'p' (0x7fff1d5a2910 'p'Exception caught: cleaning lookahead and stack -0x55ef12f91340->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x55ef12f91340, 0x7fff1d5a2910 } -0x55ef12f91320->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x55ef12f91320, 0x7fff1d5a2910 } -0x55ef12f91300->Object::~Object { 0x55ef12f912e0, 0x55ef12f91300, 0x7fff1d5a2910 } -0x55ef12f912e0->Object::~Object { 0x55ef12f912e0, 0x7fff1d5a2910 } -0x7fff1d5a2910->Object::~Object { 0x7fff1d5a2910 } +0x5578cd6f3080->Object::Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0, 0x5578cd6f3030 } +Next token is token 'p' (0x5578cd6f3080 'p'Exception caught: cleaning lookahead and stack +0x5578cd6f3080->Object::~Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0, 0x5578cd6f3030, 0x5578cd6f3080 } +0x5578cd6f3030->Object::~Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0, 0x5578cd6f3030 } +0x5578cd6f2fe0->Object::~Object { 0x5578cd6f2f40, 0x5578cd6f2f90, 0x5578cd6f2fe0 } +0x5578cd6f2f90->Object::~Object { 0x5578cd6f2f40, 0x5578cd6f2f90 } +0x5578cd6f2f40->Object::~Object { 0x5578cd6f2f40 } exception caught: printer end { } -./c++.at:1362: grep 
'^exception caught: printer$' stderr +stderr: +./c++.at:1361: grep '^exception caught: printer$' stderr +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stdout: exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae +./c++.at:1361: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./c++.at:571: $here/modern -stdout: -Modern C++: 202002 -./c++.at:571: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +686. 
c++.at:1361: ok stderr: stdout: -./c++.at:570: $here/modern -stdout: -Modern C++: 202002 -./c++.at:570: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:857: $PREPARSER ./input +./c++.at:850: $PREPARSER ./input stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:851: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:1360: $PREPARSER ./input aaaas @@ -270137,57 +270168,57 @@ Entering state 0 Stack now 0 Reading a token -0x564554e5ef40->Object::Object { } -Next token is token 'a' (0x564554e5ef40 'a') -Shifting token 'a' (0x564554e5ef40 'a') +0x55b468c22f40->Object::Object { } +Next token is token 'a' (0x55b468c22f40 'a') +Shifting token 'a' (0x55b468c22f40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564554e5ef40 'a') --> $$ = nterm item (0x564554e5ef40 'a') + $1 = token 'a' (0x55b468c22f40 'a') +-> $$ = nterm item (0x55b468c22f40 'a') Entering state 11 Stack now 0 11 Reading a token -0x564554e5ef90->Object::Object { 0x564554e5ef40 } -Next token is token 'a' (0x564554e5ef90 'a') -Shifting token 'a' (0x564554e5ef90 'a') +0x55b468c22f90->Object::Object { 0x55b468c22f40 } +Next token is token 'a' (0x55b468c22f90 'a') +Shifting token 'a' (0x55b468c22f90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564554e5ef90 'a') --> $$ = nterm item (0x564554e5ef90 'a') + $1 = token 'a' (0x55b468c22f90 'a') +-> $$ = nterm item (0x55b468c22f90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x564554e5efe0->Object::Object { 0x564554e5ef40, 0x564554e5ef90 } -Next token is token 'a' (0x564554e5efe0 'a') -Shifting token 'a' (0x564554e5efe0 'a') +0x55b468c22fe0->Object::Object { 0x55b468c22f40, 0x55b468c22f90 } +Next token is token 'a' (0x55b468c22fe0 'a') +Shifting token 'a' (0x55b468c22fe0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564554e5efe0 'a') --> $$ = nterm item (0x564554e5efe0 'a') + $1 = token 'a' (0x55b468c22fe0 'a') +-> $$ = nterm item (0x55b468c22fe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x564554e5f030->Object::Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0 } -Next token is token 'a' (0x564554e5f030 'a') -Shifting token 'a' (0x564554e5f030 'a') +0x55b468c23030->Object::Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0 } +Next token is token 'a' 
(0x55b468c23030 'a') +Shifting token 'a' (0x55b468c23030 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564554e5f030 'a') --> $$ = nterm item (0x564554e5f030 'a') + $1 = token 'a' (0x55b468c23030 'a') +-> $$ = nterm item (0x55b468c23030 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x564554e5f080->Object::Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0, 0x564554e5f030 } -Next token is token 'p' (0x564554e5f080 'p'Exception caught: cleaning lookahead and stack -0x564554e5f080->Object::~Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0, 0x564554e5f030, 0x564554e5f080 } -0x564554e5f030->Object::~Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0, 0x564554e5f030 } -0x564554e5efe0->Object::~Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0 } -0x564554e5ef90->Object::~Object { 0x564554e5ef40, 0x564554e5ef90 } -0x564554e5ef40->Object::~Object { 0x564554e5ef40 } +0x55b468c23080->Object::Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0, 0x55b468c23030 } +Next token is token 'p' (0x55b468c23080 'p'Exception caught: cleaning lookahead and stack +0x55b468c23080->Object::~Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0, 0x55b468c23030, 0x55b468c23080 } +0x55b468c23030->Object::~Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0, 0x55b468c23030 } +0x55b468c22fe0->Object::~Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0 } +0x55b468c22f90->Object::~Object { 0x55b468c22f40, 0x55b468c22f90 } +0x55b468c22f40->Object::~Object { 0x55b468c22f40 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -270196,57 +270227,57 @@ Entering state 0 Stack now 0 Reading a token -0x564554e5ef40->Object::Object { } -Next token is token 'a' (0x564554e5ef40 'a') -Shifting token 'a' (0x564554e5ef40 'a') +0x55b468c22f40->Object::Object { } +Next token is token 'a' (0x55b468c22f40 'a') +Shifting token 'a' (0x55b468c22f40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564554e5ef40 'a') --> $$ = nterm item (0x564554e5ef40 'a') + $1 = token 'a' (0x55b468c22f40 'a') +-> $$ = nterm item (0x55b468c22f40 'a') Entering state 11 Stack now 0 11 Reading a token -0x564554e5ef90->Object::Object { 0x564554e5ef40 } -Next token is token 'a' (0x564554e5ef90 'a') -Shifting token 'a' (0x564554e5ef90 'a') +0x55b468c22f90->Object::Object { 0x55b468c22f40 } +Next token is token 'a' (0x55b468c22f90 'a') +Shifting token 'a' (0x55b468c22f90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564554e5ef90 'a') --> $$ = nterm item (0x564554e5ef90 'a') + $1 = token 'a' (0x55b468c22f90 'a') +-> $$ = nterm item (0x55b468c22f90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x564554e5efe0->Object::Object { 0x564554e5ef40, 0x564554e5ef90 } -Next token is token 'a' (0x564554e5efe0 'a') -Shifting token 'a' (0x564554e5efe0 'a') +0x55b468c22fe0->Object::Object { 0x55b468c22f40, 0x55b468c22f90 } +Next token is token 'a' (0x55b468c22fe0 'a') +Shifting token 'a' (0x55b468c22fe0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564554e5efe0 'a') --> $$ = nterm item (0x564554e5efe0 'a') + $1 = token 'a' (0x55b468c22fe0 'a') +-> $$ = nterm item (0x55b468c22fe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x564554e5f030->Object::Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0 } -Next token is token 'a' 
(0x564554e5f030 'a') -Shifting token 'a' (0x564554e5f030 'a') +0x55b468c23030->Object::Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0 } +Next token is token 'a' (0x55b468c23030 'a') +Shifting token 'a' (0x55b468c23030 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564554e5f030 'a') --> $$ = nterm item (0x564554e5f030 'a') + $1 = token 'a' (0x55b468c23030 'a') +-> $$ = nterm item (0x55b468c23030 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x564554e5f080->Object::Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0, 0x564554e5f030 } -Next token is token 'p' (0x564554e5f080 'p'Exception caught: cleaning lookahead and stack -0x564554e5f080->Object::~Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0, 0x564554e5f030, 0x564554e5f080 } -0x564554e5f030->Object::~Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0, 0x564554e5f030 } -0x564554e5efe0->Object::~Object { 0x564554e5ef40, 0x564554e5ef90, 0x564554e5efe0 } -0x564554e5ef90->Object::~Object { 0x564554e5ef40, 0x564554e5ef90 } -0x564554e5ef40->Object::~Object { 0x564554e5ef40 } +0x55b468c23080->Object::Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0, 0x55b468c23030 } +Next token is token 'p' (0x55b468c23080 'p'Exception caught: cleaning lookahead and stack +0x55b468c23080->Object::~Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0, 0x55b468c23030, 0x55b468c23080 } +0x55b468c23030->Object::~Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0, 0x55b468c23030 } +0x55b468c22fe0->Object::~Object { 0x55b468c22f40, 0x55b468c22f90, 0x55b468c22fe0 } +0x55b468c22f90->Object::~Object { 0x55b468c22f40, 0x55b468c22f90 } +0x55b468c22f40->Object::~Object { 0x55b468c22f40 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr @@ -270269,78 +270300,55 @@ 685. c++.at:1360: ok stderr: stdout: -./existing.at:808: $PREPARSER ./input -stderr: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -621. 
existing.at:808: ok +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:856: $PREPARSER ./input stderr: -stdout: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:567: $here/modern stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:574: $here/modern stdout: Modern C++: 202002 -./c++.at:567: $PREPARSER ./list +./c++.at:574: $PREPARSER ./list +./c++.at:857: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: -Destroy: "0" -Destroy: "0" +Destroy: "" +Destroy: "" Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: "" Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:857: $PREPARSER ./input -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: +./c++.at:573: $here/modern stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:851: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:572: $here/modern -stdout: -stderr: +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Modern C++: 202002 -./c++.at:572: $PREPARSER ./list -stdout: -./c++.at:569: $here/modern +./c++.at:573: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -270363,13 +270371,18 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: $here/modern stdout: Modern C++: 202002 
-./c++.at:569: $PREPARSER ./list -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $PREPARSER ./list stderr: -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y Destroy: "0" Destroy: "0" Destroy: 1 @@ -270391,49 +270404,46 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:574: $here/modern -stdout: -Modern C++: 202002 -./c++.at:574: $PREPARSER ./list +./c++.at:1066: $PREPARSER ./input < in stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./c++.at:566: $here/modern +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: stdout: -Modern C++: 202302 -./c++.at:566: $PREPARSER ./list +./c++.at:851: $PREPARSER ./input +stderr: +stderr: +stdout: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $here/modern +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +Modern C++: 202002 +./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -270456,21 +270466,15 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -666. 
c++.at:566: ok -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:573: $here/modern +./c++.at:569: $here/modern stdout: Modern C++: 202002 -./c++.at:573: $PREPARSER ./list +./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -270493,295 +270497,22 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:857: $PREPARSER ./input -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1363: $PREPARSER ./input aaaal -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffc00085c6f->Object::Object { } -0x7ffc00085d50->Object::Object { 0x7ffc00085c6f } 
-0x7ffc00085c6f->Object::~Object { 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'a' (0x7ffc00085d50 'a') -0x7ffc00085c90->Object::Object { 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x7ffc00085c90, 0x7ffc00085d50 } -Shifting token 'a' (0x7ffc00085c90 'a') -0x55b07131e2e0->Object::Object { 0x7ffc00085c90 } -0x7ffc00085c90->Object::~Object { 0x55b07131e2e0, 0x7ffc00085c90 } -Entering state 1 -Stack now 0 1 -0x7ffc00085d70->Object::Object { 0x55b07131e2e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07131e2e0 'a') --> $$ = nterm item (0x7ffc00085d70 'a') -0x55b07131e2e0->Object::~Object { 0x55b07131e2e0, 0x7ffc00085d70 } -0x55b07131e2e0->Object::Object { 0x7ffc00085d70 } -0x7ffc00085d70->Object::~Object { 0x55b07131e2e0, 0x7ffc00085d70 } -Entering state 10 -Stack now 0 10 -Reading a token -0x7ffc00085c6f->Object::Object { 0x55b07131e2e0 } -0x7ffc00085d50->Object::Object { 0x55b07131e2e0, 0x7ffc00085c6f } -0x7ffc00085c6f->Object::~Object { 0x55b07131e2e0, 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'a' (0x7ffc00085d50 'a') -0x7ffc00085c90->Object::Object { 0x55b07131e2e0, 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x55b07131e2e0, 0x7ffc00085c90, 0x7ffc00085d50 } -Shifting token 'a' (0x7ffc00085c90 'a') -0x55b07131e300->Object::Object { 0x55b07131e2e0, 0x7ffc00085c90 } -0x7ffc00085c90->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c90 } -Entering state 1 -Stack now 0 10 1 -0x7ffc00085d70->Object::Object { 0x55b07131e2e0, 0x55b07131e300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07131e300 'a') --> $$ = nterm item (0x7ffc00085d70 'a') -0x55b07131e300->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d70 } -0x55b07131e300->Object::Object { 0x55b07131e2e0, 0x7ffc00085d70 } -0x7ffc00085d70->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d70 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0x7ffc00085c6f->Object::Object { 0x55b07131e2e0, 0x55b07131e300 } -0x7ffc00085d50->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c6f } -0x7ffc00085c6f->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'a' (0x7ffc00085d50 'a') -0x7ffc00085c90->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c90, 0x7ffc00085d50 } -Shifting token 'a' (0x7ffc00085c90 'a') -0x55b07131e320->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c90 } -0x7ffc00085c90->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c90 } -Entering state 1 -Stack now 0 10 10 1 -0x7ffc00085d70->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07131e320 'a') --> $$ = nterm item (0x7ffc00085d70 'a') -0x55b07131e320->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d70 } -0x55b07131e320->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d70 } -0x7ffc00085d70->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d70 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x7ffc00085c6f->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320 } -0x7ffc00085d50->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c6f } -0x7ffc00085c6f->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'a' 
(0x7ffc00085d50 'a') -0x7ffc00085c90->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c90, 0x7ffc00085d50 } -Shifting token 'a' (0x7ffc00085c90 'a') -0x55b07131e340->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c90 } -0x7ffc00085c90->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085c90 } -Entering state 1 -Stack now 0 10 10 10 1 -0x7ffc00085d70->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07131e340 'a') --> $$ = nterm item (0x7ffc00085d70 'a') -0x55b07131e340->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085d70 } -0x55b07131e340->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d70 } -0x7ffc00085d70->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085d70 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x7ffc00085c6f->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340 } -0x7ffc00085d50->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085c6f } -0x7ffc00085c6f->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'p' (0x7ffc00085d50 'p'Exception caught: cleaning lookahead and stack -0x55b07131e340->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085d50 } -0x55b07131e320->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d50 } -0x55b07131e300->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d50 } -0x55b07131e2e0->Object::~Object { 0x55b07131e2e0, 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x7ffc00085d50 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffc00085c6f->Object::Object { } -0x7ffc00085d50->Object::Object { 0x7ffc00085c6f } -0x7ffc00085c6f->Object::~Object { 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'a' (0x7ffc00085d50 'a') -0x7ffc00085c90->Object::Object { 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x7ffc00085c90, 0x7ffc00085d50 } -Shifting token 'a' (0x7ffc00085c90 'a') -0x55b07131e2e0->Object::Object { 0x7ffc00085c90 } -0x7ffc00085c90->Object::~Object { 0x55b07131e2e0, 0x7ffc00085c90 } -Entering state 1 -Stack now 0 1 -0x7ffc00085d70->Object::Object { 0x55b07131e2e0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07131e2e0 'a') --> $$ = nterm item (0x7ffc00085d70 'a') -0x55b07131e2e0->Object::~Object { 0x55b07131e2e0, 0x7ffc00085d70 } -0x55b07131e2e0->Object::Object { 0x7ffc00085d70 } -0x7ffc00085d70->Object::~Object { 0x55b07131e2e0, 0x7ffc00085d70 } -Entering state 10 -Stack now 0 10 -Reading a token -0x7ffc00085c6f->Object::Object { 0x55b07131e2e0 } -0x7ffc00085d50->Object::Object { 0x55b07131e2e0, 0x7ffc00085c6f } -0x7ffc00085c6f->Object::~Object { 0x55b07131e2e0, 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'a' (0x7ffc00085d50 'a') -0x7ffc00085c90->Object::Object { 0x55b07131e2e0, 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x55b07131e2e0, 0x7ffc00085c90, 0x7ffc00085d50 } -Shifting token 'a' (0x7ffc00085c90 'a') 
-0x55b07131e300->Object::Object { 0x55b07131e2e0, 0x7ffc00085c90 } -0x7ffc00085c90->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c90 } -Entering state 1 -Stack now 0 10 1 -0x7ffc00085d70->Object::Object { 0x55b07131e2e0, 0x55b07131e300 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07131e300 'a') --> $$ = nterm item (0x7ffc00085d70 'a') -0x55b07131e300->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d70 } -0x55b07131e300->Object::Object { 0x55b07131e2e0, 0x7ffc00085d70 } -0x7ffc00085d70->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d70 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0x7ffc00085c6f->Object::Object { 0x55b07131e2e0, 0x55b07131e300 } -0x7ffc00085d50->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c6f } -0x7ffc00085c6f->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'a' (0x7ffc00085d50 'a') -0x7ffc00085c90->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c90, 0x7ffc00085d50 } -Shifting token 'a' (0x7ffc00085c90 'a') -0x55b07131e320->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085c90 } -0x7ffc00085c90->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c90 } -Entering state 1 -Stack now 0 10 10 1 -0x7ffc00085d70->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07131e320 'a') --> $$ = nterm item (0x7ffc00085d70 'a') -0x55b07131e320->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d70 } -0x55b07131e320->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d70 } -0x7ffc00085d70->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d70 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x7ffc00085c6f->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320 } -0x7ffc00085d50->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c6f } -0x7ffc00085c6f->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'a' (0x7ffc00085d50 'a') -0x7ffc00085c90->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c90, 0x7ffc00085d50 } -Shifting token 'a' (0x7ffc00085c90 'a') -0x55b07131e340->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085c90 } -0x7ffc00085c90->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085c90 } -Entering state 1 -Stack now 0 10 10 10 1 -0x7ffc00085d70->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07131e340 'a') --> $$ = nterm item (0x7ffc00085d70 'a') -0x55b07131e340->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085d70 } -0x55b07131e340->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d70 } -0x7ffc00085d70->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085d70 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x7ffc00085c6f->Object::Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340 } -0x7ffc00085d50->Object::Object { 
0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085c6f } -0x7ffc00085c6f->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085c6f, 0x7ffc00085d50 } -Next token is token 'p' (0x7ffc00085d50 'p'Exception caught: cleaning lookahead and stack -0x55b07131e340->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x55b07131e340, 0x7ffc00085d50 } -0x55b07131e320->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x55b07131e320, 0x7ffc00085d50 } -0x55b07131e300->Object::~Object { 0x55b07131e2e0, 0x55b07131e300, 0x7ffc00085d50 } -0x55b07131e2e0->Object::~Object { 0x55b07131e2e0, 0x7ffc00085d50 } -0x7ffc00085d50->Object::~Object { 0x7ffc00085d50 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -stderr: +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./c++.at:857: $PREPARSER ./input -./c++.at:1363: $PREPARSER ./input aaaaE stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1363: $PREPARSER ./input aaaaT ./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -688. 
c++.at:1363: ok -stderr: stdout: ./c++.at:1362: $PREPARSER ./input aaaas stderr: @@ -270804,99 +270535,99 @@ Entering state 0 Stack now 0 Reading a token -0x7fffe78e03df->Object::Object { } -0x7fffe78e04c0->Object::Object { 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'a' (0x7fffe78e04c0 'a') -0x7fffe78e0400->Object::Object { 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x7fffe78e0400, 0x7fffe78e04c0 } -Shifting token 'a' (0x7fffe78e0400 'a') -0x55f91e0dd2e0->Object::Object { 0x7fffe78e0400 } -0x7fffe78e0400->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e0400 } +0x7ffc574fa26f->Object::Object { } +0x7ffc574fa350->Object::Object { 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'a' (0x7ffc574fa350 'a') +0x7ffc574fa290->Object::Object { 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x7ffc574fa290, 0x7ffc574fa350 } +Shifting token 'a' (0x7ffc574fa290 'a') +0x55e40dd792e0->Object::Object { 0x7ffc574fa290 } +0x7ffc574fa290->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa290 } Entering state 2 Stack now 0 2 -0x7fffe78e04e0->Object::Object { 0x55f91e0dd2e0 } +0x7ffc574fa370->Object::Object { 0x55e40dd792e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f91e0dd2e0 'a') --> $$ = nterm item (0x7fffe78e04e0 'a') -0x55f91e0dd2e0->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e04e0 } -0x55f91e0dd2e0->Object::Object { 0x7fffe78e04e0 } -0x7fffe78e04e0->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e04e0 } + $1 = token 'a' (0x55e40dd792e0 'a') +-> $$ = nterm item (0x7ffc574fa370 'a') +0x55e40dd792e0->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa370 } +0x55e40dd792e0->Object::Object { 0x7ffc574fa370 } +0x7ffc574fa370->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa370 } Entering state 11 Stack now 0 11 Reading a token -0x7fffe78e03df->Object::Object { 0x55f91e0dd2e0 } -0x7fffe78e04c0->Object::Object { 0x55f91e0dd2e0, 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'a' (0x7fffe78e04c0 'a') -0x7fffe78e0400->Object::Object { 0x55f91e0dd2e0, 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e0400, 0x7fffe78e04c0 } -Shifting token 'a' (0x7fffe78e0400 'a') -0x55f91e0dd300->Object::Object { 0x55f91e0dd2e0, 0x7fffe78e0400 } -0x7fffe78e0400->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e0400 } +0x7ffc574fa26f->Object::Object { 0x55e40dd792e0 } +0x7ffc574fa350->Object::Object { 0x55e40dd792e0, 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'a' (0x7ffc574fa350 'a') +0x7ffc574fa290->Object::Object { 0x55e40dd792e0, 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa290, 0x7ffc574fa350 } +Shifting token 'a' (0x7ffc574fa290 'a') +0x55e40dd79300->Object::Object { 0x55e40dd792e0, 0x7ffc574fa290 } +0x7ffc574fa290->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa290 } Entering state 2 Stack now 0 11 2 -0x7fffe78e04e0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300 } +0x7ffc574fa370->Object::Object { 0x55e40dd792e0, 0x55e40dd79300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f91e0dd300 'a') --> $$ = nterm item (0x7fffe78e04e0 'a') -0x55f91e0dd300->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e04e0 } -0x55f91e0dd300->Object::Object { 0x55f91e0dd2e0, 0x7fffe78e04e0 } -0x7fffe78e04e0->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 
0x7fffe78e04e0 } + $1 = token 'a' (0x55e40dd79300 'a') +-> $$ = nterm item (0x7ffc574fa370 'a') +0x55e40dd79300->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa370 } +0x55e40dd79300->Object::Object { 0x55e40dd792e0, 0x7ffc574fa370 } +0x7ffc574fa370->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa370 } Entering state 11 Stack now 0 11 11 Reading a token -0x7fffe78e03df->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300 } -0x7fffe78e04c0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'a' (0x7fffe78e04c0 'a') -0x7fffe78e0400->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e0400, 0x7fffe78e04c0 } -Shifting token 'a' (0x7fffe78e0400 'a') -0x55f91e0dd320->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e0400 } -0x7fffe78e0400->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e0400 } +0x7ffc574fa26f->Object::Object { 0x55e40dd792e0, 0x55e40dd79300 } +0x7ffc574fa350->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'a' (0x7ffc574fa350 'a') +0x7ffc574fa290->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa290, 0x7ffc574fa350 } +Shifting token 'a' (0x7ffc574fa290 'a') +0x55e40dd79320->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa290 } +0x7ffc574fa290->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa290 } Entering state 2 Stack now 0 11 11 2 -0x7fffe78e04e0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320 } +0x7ffc574fa370->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f91e0dd320 'a') --> $$ = nterm item (0x7fffe78e04e0 'a') -0x55f91e0dd320->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04e0 } -0x55f91e0dd320->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e04e0 } -0x7fffe78e04e0->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04e0 } + $1 = token 'a' (0x55e40dd79320 'a') +-> $$ = nterm item (0x7ffc574fa370 'a') +0x55e40dd79320->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa370 } +0x55e40dd79320->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa370 } +0x7ffc574fa370->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa370 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7fffe78e03df->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320 } -0x7fffe78e04c0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'a' (0x7fffe78e04c0 'a') -0x7fffe78e0400->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e0400, 0x7fffe78e04c0 } -Shifting token 'a' (0x7fffe78e0400 'a') -0x55f91e0dd340->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e0400 } -0x7fffe78e0400->Object::~Object { 
0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e0400 } +0x7ffc574fa26f->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320 } +0x7ffc574fa350->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'a' (0x7ffc574fa350 'a') +0x7ffc574fa290->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa290, 0x7ffc574fa350 } +Shifting token 'a' (0x7ffc574fa290 'a') +0x55e40dd79340->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa290 } +0x7ffc574fa290->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa290 } Entering state 2 Stack now 0 11 11 11 2 -0x7fffe78e04e0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340 } +0x7ffc574fa370->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f91e0dd340 'a') --> $$ = nterm item (0x7fffe78e04e0 'a') -0x55f91e0dd340->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e04e0 } -0x55f91e0dd340->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04e0 } -0x7fffe78e04e0->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e04e0 } + $1 = token 'a' (0x55e40dd79340 'a') +-> $$ = nterm item (0x7ffc574fa370 'a') +0x55e40dd79340->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa370 } +0x55e40dd79340->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa370 } +0x7ffc574fa370->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa370 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7fffe78e03df->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340 } -0x7fffe78e04c0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'p' (0x7fffe78e04c0 'p'Exception caught: cleaning lookahead and stack -0x55f91e0dd340->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e04c0 } -0x55f91e0dd320->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04c0 } -0x55f91e0dd300->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e04c0 } -0x55f91e0dd2e0->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x7fffe78e04c0 } +0x7ffc574fa26f->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340 } +0x7ffc574fa350->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'p' (0x7ffc574fa350 'p'Exception caught: cleaning lookahead and stack +0x55e40dd79340->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa350 } +0x55e40dd79320->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 
0x7ffc574fa350 } +0x55e40dd79300->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa350 } +0x55e40dd792e0->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x7ffc574fa350 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -270905,99 +270636,99 @@ Entering state 0 Stack now 0 Reading a token -0x7fffe78e03df->Object::Object { } -0x7fffe78e04c0->Object::Object { 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'a' (0x7fffe78e04c0 'a') -0x7fffe78e0400->Object::Object { 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x7fffe78e0400, 0x7fffe78e04c0 } -Shifting token 'a' (0x7fffe78e0400 'a') -0x55f91e0dd2e0->Object::Object { 0x7fffe78e0400 } -0x7fffe78e0400->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e0400 } +0x7ffc574fa26f->Object::Object { } +0x7ffc574fa350->Object::Object { 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'a' (0x7ffc574fa350 'a') +0x7ffc574fa290->Object::Object { 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x7ffc574fa290, 0x7ffc574fa350 } +Shifting token 'a' (0x7ffc574fa290 'a') +0x55e40dd792e0->Object::Object { 0x7ffc574fa290 } +0x7ffc574fa290->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa290 } Entering state 2 Stack now 0 2 -0x7fffe78e04e0->Object::Object { 0x55f91e0dd2e0 } +0x7ffc574fa370->Object::Object { 0x55e40dd792e0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f91e0dd2e0 'a') --> $$ = nterm item (0x7fffe78e04e0 'a') -0x55f91e0dd2e0->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e04e0 } -0x55f91e0dd2e0->Object::Object { 0x7fffe78e04e0 } -0x7fffe78e04e0->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e04e0 } + $1 = token 'a' (0x55e40dd792e0 'a') +-> $$ = nterm item (0x7ffc574fa370 'a') +0x55e40dd792e0->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa370 } +0x55e40dd792e0->Object::Object { 0x7ffc574fa370 } +0x7ffc574fa370->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa370 } Entering state 11 Stack now 0 11 Reading a token -0x7fffe78e03df->Object::Object { 0x55f91e0dd2e0 } -0x7fffe78e04c0->Object::Object { 0x55f91e0dd2e0, 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'a' (0x7fffe78e04c0 'a') -0x7fffe78e0400->Object::Object { 0x55f91e0dd2e0, 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e0400, 0x7fffe78e04c0 } -Shifting token 'a' (0x7fffe78e0400 'a') -0x55f91e0dd300->Object::Object { 0x55f91e0dd2e0, 0x7fffe78e0400 } -0x7fffe78e0400->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e0400 } +0x7ffc574fa26f->Object::Object { 0x55e40dd792e0 } +0x7ffc574fa350->Object::Object { 0x55e40dd792e0, 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'a' (0x7ffc574fa350 'a') +0x7ffc574fa290->Object::Object { 0x55e40dd792e0, 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa290, 0x7ffc574fa350 } +Shifting token 'a' (0x7ffc574fa290 'a') +0x55e40dd79300->Object::Object { 0x55e40dd792e0, 0x7ffc574fa290 } +0x7ffc574fa290->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa290 } Entering state 2 Stack now 0 11 2 -0x7fffe78e04e0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300 } +0x7ffc574fa370->Object::Object { 0x55e40dd792e0, 0x55e40dd79300 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f91e0dd300 'a') --> $$ = 
nterm item (0x7fffe78e04e0 'a') -0x55f91e0dd300->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e04e0 } -0x55f91e0dd300->Object::Object { 0x55f91e0dd2e0, 0x7fffe78e04e0 } -0x7fffe78e04e0->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e04e0 } + $1 = token 'a' (0x55e40dd79300 'a') +-> $$ = nterm item (0x7ffc574fa370 'a') +0x55e40dd79300->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa370 } +0x55e40dd79300->Object::Object { 0x55e40dd792e0, 0x7ffc574fa370 } +0x7ffc574fa370->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa370 } Entering state 11 Stack now 0 11 11 Reading a token -0x7fffe78e03df->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300 } -0x7fffe78e04c0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'a' (0x7fffe78e04c0 'a') -0x7fffe78e0400->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e0400, 0x7fffe78e04c0 } -Shifting token 'a' (0x7fffe78e0400 'a') -0x55f91e0dd320->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e0400 } -0x7fffe78e0400->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e0400 } +0x7ffc574fa26f->Object::Object { 0x55e40dd792e0, 0x55e40dd79300 } +0x7ffc574fa350->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'a' (0x7ffc574fa350 'a') +0x7ffc574fa290->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa290, 0x7ffc574fa350 } +Shifting token 'a' (0x7ffc574fa290 'a') +0x55e40dd79320->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa290 } +0x7ffc574fa290->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa290 } Entering state 2 Stack now 0 11 11 2 -0x7fffe78e04e0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320 } +0x7ffc574fa370->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f91e0dd320 'a') --> $$ = nterm item (0x7fffe78e04e0 'a') -0x55f91e0dd320->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04e0 } -0x55f91e0dd320->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e04e0 } -0x7fffe78e04e0->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04e0 } + $1 = token 'a' (0x55e40dd79320 'a') +-> $$ = nterm item (0x7ffc574fa370 'a') +0x55e40dd79320->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa370 } +0x55e40dd79320->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa370 } +0x7ffc574fa370->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa370 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7fffe78e03df->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320 } -0x7fffe78e04c0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'a' (0x7fffe78e04c0 'a') -0x7fffe78e0400->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x55f91e0dd2e0, 
0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e0400, 0x7fffe78e04c0 } -Shifting token 'a' (0x7fffe78e0400 'a') -0x55f91e0dd340->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e0400 } -0x7fffe78e0400->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e0400 } +0x7ffc574fa26f->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320 } +0x7ffc574fa350->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'a' (0x7ffc574fa350 'a') +0x7ffc574fa290->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa290, 0x7ffc574fa350 } +Shifting token 'a' (0x7ffc574fa290 'a') +0x55e40dd79340->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa290 } +0x7ffc574fa290->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa290 } Entering state 2 Stack now 0 11 11 11 2 -0x7fffe78e04e0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340 } +0x7ffc574fa370->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f91e0dd340 'a') --> $$ = nterm item (0x7fffe78e04e0 'a') -0x55f91e0dd340->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e04e0 } -0x55f91e0dd340->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04e0 } -0x7fffe78e04e0->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e04e0 } + $1 = token 'a' (0x55e40dd79340 'a') +-> $$ = nterm item (0x7ffc574fa370 'a') +0x55e40dd79340->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa370 } +0x55e40dd79340->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa370 } +0x7ffc574fa370->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa370 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7fffe78e03df->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340 } -0x7fffe78e04c0->Object::Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e03df } -0x7fffe78e03df->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e03df, 0x7fffe78e04c0 } -Next token is token 'p' (0x7fffe78e04c0 'p'Exception caught: cleaning lookahead and stack -0x55f91e0dd340->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x55f91e0dd340, 0x7fffe78e04c0 } -0x55f91e0dd320->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x55f91e0dd320, 0x7fffe78e04c0 } -0x55f91e0dd300->Object::~Object { 0x55f91e0dd2e0, 0x55f91e0dd300, 0x7fffe78e04c0 } -0x55f91e0dd2e0->Object::~Object { 0x55f91e0dd2e0, 0x7fffe78e04c0 } -0x7fffe78e04c0->Object::~Object { 0x7fffe78e04c0 } +0x7ffc574fa26f->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340 } +0x7ffc574fa350->Object::Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa26f } +0x7ffc574fa26f->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa26f, 0x7ffc574fa350 } +Next token is token 'p' (0x7ffc574fa350 'p'Exception caught: 
cleaning lookahead and stack +0x55e40dd79340->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x55e40dd79340, 0x7ffc574fa350 } +0x55e40dd79320->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x55e40dd79320, 0x7ffc574fa350 } +0x55e40dd79300->Object::~Object { 0x55e40dd792e0, 0x55e40dd79300, 0x7ffc574fa350 } +0x55e40dd792e0->Object::~Object { 0x55e40dd792e0, 0x7ffc574fa350 } +0x7ffc574fa350->Object::~Object { 0x7ffc574fa350 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr @@ -271009,8 +270740,11 @@ ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaE stderr: +stderr: +stdout: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:1362: $PREPARSER ./input aaaaT stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -271019,35 +270753,304 @@ ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 687. c++.at:1362: ok stderr: +stderr: stdout: +stdout: +./c++.at:1363: $PREPARSER ./input aaaas ./c++.at:851: $PREPARSER ./input stderr: +stderr: +exception caught: reduction ./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./c++.at:1363: $PREPARSER ./input aaaal ./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffee095195f->Object::Object { } +0x7ffee0951a40->Object::Object { 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'a' (0x7ffee0951a40 'a') +0x7ffee0951980->Object::Object { 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x7ffee0951980, 0x7ffee0951a40 } +Shifting token 'a' (0x7ffee0951980 'a') +0x55edecaab2e0->Object::Object { 0x7ffee0951980 } +0x7ffee0951980->Object::~Object { 0x55edecaab2e0, 0x7ffee0951980 } +Entering state 1 +Stack now 0 1 +0x7ffee0951a60->Object::Object { 0x55edecaab2e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55edecaab2e0 'a') +-> $$ = nterm item (0x7ffee0951a60 'a') +0x55edecaab2e0->Object::~Object { 0x55edecaab2e0, 0x7ffee0951a60 } +0x55edecaab2e0->Object::Object { 0x7ffee0951a60 } +0x7ffee0951a60->Object::~Object { 0x55edecaab2e0, 0x7ffee0951a60 } +Entering state 10 +Stack now 0 10 +Reading a token +0x7ffee095195f->Object::Object { 0x55edecaab2e0 } +0x7ffee0951a40->Object::Object { 0x55edecaab2e0, 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x55edecaab2e0, 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'a' (0x7ffee0951a40 'a') +0x7ffee0951980->Object::Object { 0x55edecaab2e0, 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x55edecaab2e0, 0x7ffee0951980, 0x7ffee0951a40 } +Shifting token 'a' 
(0x7ffee0951980 'a') +0x55edecaab300->Object::Object { 0x55edecaab2e0, 0x7ffee0951980 } +0x7ffee0951980->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951980 } +Entering state 1 +Stack now 0 10 1 +0x7ffee0951a60->Object::Object { 0x55edecaab2e0, 0x55edecaab300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55edecaab300 'a') +-> $$ = nterm item (0x7ffee0951a60 'a') +0x55edecaab300->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a60 } +0x55edecaab300->Object::Object { 0x55edecaab2e0, 0x7ffee0951a60 } +0x7ffee0951a60->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a60 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7ffee095195f->Object::Object { 0x55edecaab2e0, 0x55edecaab300 } +0x7ffee0951a40->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'a' (0x7ffee0951a40 'a') +0x7ffee0951980->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951980, 0x7ffee0951a40 } +Shifting token 'a' (0x7ffee0951980 'a') +0x55edecaab320->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951980 } +0x7ffee0951980->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951980 } +Entering state 1 +Stack now 0 10 10 1 +0x7ffee0951a60->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55edecaab320 'a') +-> $$ = nterm item (0x7ffee0951a60 'a') +0x55edecaab320->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a60 } +0x55edecaab320->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a60 } +0x7ffee0951a60->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a60 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x7ffee095195f->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320 } +0x7ffee0951a40->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'a' (0x7ffee0951a40 'a') +0x7ffee0951980->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951980, 0x7ffee0951a40 } +Shifting token 'a' (0x7ffee0951980 'a') +0x55edecaab340->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951980 } +0x7ffee0951980->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee0951980 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7ffee0951a60->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55edecaab340 'a') +-> $$ = nterm item (0x7ffee0951a60 'a') +0x55edecaab340->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee0951a60 } +0x55edecaab340->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a60 } +0x7ffee0951a60->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee0951a60 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7ffee095195f->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340 } 
+0x7ffee0951a40->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'p' (0x7ffee0951a40 'p'Exception caught: cleaning lookahead and stack +0x55edecaab340->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee0951a40 } +0x55edecaab320->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a40 } +0x55edecaab300->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a40 } +0x55edecaab2e0->Object::~Object { 0x55edecaab2e0, 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x7ffee0951a40 } +exception caught: printer +end { } +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffee095195f->Object::Object { } +0x7ffee0951a40->Object::Object { 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'a' (0x7ffee0951a40 'a') +0x7ffee0951980->Object::Object { 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x7ffee0951980, 0x7ffee0951a40 } +Shifting token 'a' (0x7ffee0951980 'a') +0x55edecaab2e0->Object::Object { 0x7ffee0951980 } +0x7ffee0951980->Object::~Object { 0x55edecaab2e0, 0x7ffee0951980 } +Entering state 1 +Stack now 0 1 +0x7ffee0951a60->Object::Object { 0x55edecaab2e0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55edecaab2e0 'a') +-> $$ = nterm item (0x7ffee0951a60 'a') +0x55edecaab2e0->Object::~Object { 0x55edecaab2e0, 0x7ffee0951a60 } +0x55edecaab2e0->Object::Object { 0x7ffee0951a60 } +0x7ffee0951a60->Object::~Object { 0x55edecaab2e0, 0x7ffee0951a60 } +Entering state 10 +Stack now 0 10 +Reading a token +0x7ffee095195f->Object::Object { 0x55edecaab2e0 } +0x7ffee0951a40->Object::Object { 0x55edecaab2e0, 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x55edecaab2e0, 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'a' (0x7ffee0951a40 'a') +0x7ffee0951980->Object::Object { 0x55edecaab2e0, 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x55edecaab2e0, 0x7ffee0951980, 0x7ffee0951a40 } +Shifting token 'a' (0x7ffee0951980 'a') +0x55edecaab300->Object::Object { 0x55edecaab2e0, 0x7ffee0951980 } +0x7ffee0951980->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951980 } +Entering state 1 +Stack now 0 10 1 +0x7ffee0951a60->Object::Object { 0x55edecaab2e0, 0x55edecaab300 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55edecaab300 'a') +-> $$ = nterm item (0x7ffee0951a60 'a') +0x55edecaab300->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a60 } +0x55edecaab300->Object::Object { 0x55edecaab2e0, 0x7ffee0951a60 } +0x7ffee0951a60->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a60 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7ffee095195f->Object::Object { 0x55edecaab2e0, 0x55edecaab300 } +0x7ffee0951a40->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'a' (0x7ffee0951a40 'a') +0x7ffee0951980->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951980, 0x7ffee0951a40 } +Shifting token 'a' (0x7ffee0951980 'a') +0x55edecaab320->Object::Object { 0x55edecaab2e0, 
0x55edecaab300, 0x7ffee0951980 } +0x7ffee0951980->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951980 } +Entering state 1 +Stack now 0 10 10 1 +0x7ffee0951a60->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55edecaab320 'a') +-> $$ = nterm item (0x7ffee0951a60 'a') +0x55edecaab320->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a60 } +0x55edecaab320->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a60 } +0x7ffee0951a60->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a60 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x7ffee095195f->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320 } +0x7ffee0951a40->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'a' (0x7ffee0951a40 'a') +0x7ffee0951980->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951980, 0x7ffee0951a40 } +Shifting token 'a' (0x7ffee0951980 'a') +0x55edecaab340->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951980 } +0x7ffee0951980->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee0951980 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7ffee0951a60->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55edecaab340 'a') +-> $$ = nterm item (0x7ffee0951a60 'a') +0x55edecaab340->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee0951a60 } +0x55edecaab340->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a60 } +0x7ffee0951a60->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee0951a60 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7ffee095195f->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340 } +0x7ffee0951a40->Object::Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee095195f } +0x7ffee095195f->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee095195f, 0x7ffee0951a40 } +Next token is token 'p' (0x7ffee0951a40 'p'Exception caught: cleaning lookahead and stack +0x55edecaab340->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x55edecaab340, 0x7ffee0951a40 } +0x55edecaab320->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x55edecaab320, 0x7ffee0951a40 } +0x55edecaab300->Object::~Object { 0x55edecaab2e0, 0x55edecaab300, 0x7ffee0951a40 } +0x55edecaab2e0->Object::~Object { 0x55edecaab2e0, 0x7ffee0951a40 } +0x7ffee0951a40->Object::~Object { 0x7ffee0951a40 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE +stderr: +stderr: stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:857: $PREPARSER ./input +exception caught: syntax error, 
unexpected end of file, expecting 'a' +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stdout: stderr: ./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' +./c++.at:1363: $PREPARSER ./input aaaaT ./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:566: $here/modern +./c++.at:1363: $PREPARSER ./input aaaaR +stdout: +Modern C++: 202302 +./c++.at:566: $PREPARSER ./list +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +688. c++.at:1363: ok +666. c++.at:566: ok stderr: stdout: ./c++.at:568: $here/modern +stderr: +stdout: stdout: Modern C++: 202302 +./c++.at:851: $PREPARSER ./input ./c++.at:568: $PREPARSER ./list stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Destroy: "" Destroy: "" Destroy: 1 @@ -271070,33 +271073,28 @@ Destroy: () Destroy: (0, 1, 2, 4, 6) ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 668. c++.at:568: ok stderr: stdout: -./c++.at:851: $PREPARSER ./input +./c++.at:857: $PREPARSER ./input stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:857: $PREPARSER ./input -stderr: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:571: $here/modern -======== Testing with C++ standard flags: '' +./c++.at:567: $here/modern stdout: -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Modern C++: 202302 -./c++.at:571: $PREPARSER ./list +./c++.at:567: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -271119,14 +271117,14 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -671. c++.at:571: ok +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +667. 
c++.at:567: ok stderr: stdout: -./c++.at:567: $here/modern +./c++.at:570: $here/modern stdout: Modern C++: 202302 -./c++.at:567: $PREPARSER ./list +./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -271149,18 +271147,49 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -667. c++.at:567: ok +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +670. c++.at:570: ok +stderr: +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:570: $here/modern +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:857: $PREPARSER ./input +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:573: $here/modern +stderr: +stdout: stdout: Modern C++: 202302 -./c++.at:570: $PREPARSER ./list +./c++.at:573: $PREPARSER ./list +./c++.at:574: $here/modern stderr: +stdout: Destroy: "0" Destroy: "0" Destroy: 1 @@ -271182,24 +271211,9 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -670. c++.at:570: ok -stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:574: $here/modern -stdout: Modern C++: 202302 ./c++.at:574: $PREPARSER ./list +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Destroy: "" Destroy: "" @@ -271222,15 +271236,17 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +673. c++.at:573: 674. c++.at:574: ok + ok stderr: stdout: -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:569: $here/modern +./c++.at:571: $here/modern stdout: Modern C++: 202302 -./c++.at:569: $PREPARSER ./list +./c++.at:571: $PREPARSER ./list +stderr: stderr: -674. c++.at:574: ok Destroy: "0" Destroy: "0" Destroy: 1 @@ -271252,8 +271268,14 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -669. c++.at:569: ok +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:857: $PREPARSER ./input +671. 
c++.at:571: ok +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:572: $here/modern @@ -271286,10 +271308,10 @@ 672. c++.at:572: ok stderr: stdout: -./c++.at:573: $here/modern +./c++.at:569: $here/modern stdout: Modern C++: 202302 -./c++.at:573: $PREPARSER ./list +./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -271312,8 +271334,8 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -673. c++.at:573: ok +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +669. c++.at:569: ok stderr: stdout: ./c++.at:851: $PREPARSER ./input @@ -271326,9 +271348,8 @@ ./c++.at:857: $PREPARSER ./input stderr: ./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:1066: $PREPARSER ./input < in @@ -271349,18 +271370,32 @@ 684. c++.at:1066: ok stderr: stdout: -./c++.at:858: $PREPARSER ./input +./c++.at:851: $PREPARSER ./input stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:857: $PREPARSER ./input +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:851: $PREPARSER ./input stderr: ./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +678. c++.at:848: ok +stderr: +stdout: +./c++.at:857: $PREPARSER ./input +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:858: $PREPARSER ./input @@ -271377,10 +271412,11 @@ ./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:851: $PREPARSER ./input +./c++.at:858: $PREPARSER ./input stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -678. 
c++.at:848: ok +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:858: $PREPARSER ./input @@ -271543,7 +271579,7 @@ create-stamp debian/debhelper-build-stamp dh_prep dh_auto_install - make -j40 install DESTDIR=/build/reproducible-path/bison-3.8.2\+dfsg/debian/tmp AM_UPDATE_INFO_DIR=no + make -j42 install DESTDIR=/build/reproducible-path/bison-3.8.2\+dfsg/debian/tmp AM_UPDATE_INFO_DIR=no make[1]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' make install-recursive make[2]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' @@ -271714,70 +271750,70 @@ /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' - /usr/bin/install -c -m 644 lib/liby.a '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' + /usr/bin/install -c -m 644 lib/liby.a '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' - /usr/bin/install -c src/bison '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' - /usr/bin/install -c -m 644 m4/bison-i18n.m4 '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c' - /usr/bin/install -c src/yacc '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' + /usr/bin/install -c src/bison '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' - /usr/bin/install -c -m 644 examples/c/bistromathic/parse.y examples/c/bistromathic/Makefile examples/c/bistromathic/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' - /usr/bin/install -c -m 644 examples/c++/simple.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' - /usr/bin/install -c -m 644 examples/c++/calc++/driver.cc examples/c++/calc++/driver.hh examples/c++/calc++/scanner.ll examples/c++/calc++/calc++.cc examples/c++/calc++/parser.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' - ( cd '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' && ranlib liby.a ) + /usr/bin/install -c src/yacc '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc' + /usr/bin/install -c -m 644 m4/bison-i18n.m4 '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' - /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md 
'/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' - /usr/bin/install -c -m 644 examples/d/calc/calc.y examples/d/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc' + /usr/bin/install -c -m 644 examples/c++/calc++/driver.cc examples/c++/calc++/driver.hh examples/c++/calc++/scanner.ll examples/c++/calc++/calc++.cc examples/c++/calc++/parser.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' + /usr/bin/install -c -m 644 examples/c++/simple.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d' - /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison' + /usr/bin/install -c -m 644 examples/c/bistromathic/parse.y examples/c/bistromathic/Makefile examples/c/bistromathic/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' /usr/bin/install -c -m 644 examples/c/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c' - /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java' - /usr/bin/install -c -m 644 examples/c++/calc++/README.md examples/c++/calc++/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' - /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc' + /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison' + /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples' - /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' - /usr/bin/install -c -m 644 examples/c++/README.md examples/c++/Makefile examples/c++/variant.yy examples/c++/variant-11.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' + ( cd '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' && ranlib liby.a ) + /usr/bin/install -c -m 644 examples/c++/calc++/README.md examples/c++/calc++/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' + /usr/bin/install -c -m 644 examples/d/calc/calc.y examples/d/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr' - /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' + /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java' + /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc' /usr/bin/install -c -m 644 examples/d/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d' + /usr/bin/install -c -m 644 examples/c++/README.md 
examples/c++/Makefile examples/c++/variant.yy examples/c++/variant-11.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' + /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' /usr/bin/install -c -m 644 AUTHORS COPYING NEWS README THANKS TODO '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison' + /usr/bin/install -c -m 644 examples/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples' + /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' - /usr/bin/install -c -m 644 examples/java/calc/Calc.y examples/java/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc' - /usr/bin/install -c -m 644 examples/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples' - /usr/bin/install -c -m 644 examples/java/simple/Calc.y examples/java/simple/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' - /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' - /usr/bin/install -c -m 644 examples/c/lexcalc/parse.y examples/c/lexcalc/scan.l examples/c/lexcalc/Makefile examples/c/lexcalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' /usr/bin/install -c -m 644 examples/c/glr/c++-types.y examples/c/glr/Makefile examples/c/glr/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr' + /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' + /usr/bin/install -c -m 644 examples/java/calc/Calc.y examples/java/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc' /usr/bin/install -c -m 644 examples/java/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc' + /usr/bin/install -c -m 644 examples/java/simple/Calc.y examples/java/simple/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' - /usr/bin/install -c -m 644 data/README.md data/bison-default.css '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' - /usr/bin/install -c -m 644 examples/c/mfcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' - /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple' - /usr/bin/install -c -m 644 examples/c/pushcalc/calc.y examples/c/pushcalc/Makefile examples/c/pushcalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc' /usr/bin/install -c -m 644 data/m4sugar/foreach.m4 data/m4sugar/m4sugar.m4 
'/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar' - /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons' - /usr/bin/install -c -m 644 examples/c/rpcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' - /usr/bin/install -c -m 644 examples/c/reccalc/parse.y examples/c/reccalc/scan.l examples/c/reccalc/Makefile examples/c/reccalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' - /usr/bin/install -c -m 644 examples/d/simple/calc.y examples/d/simple/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple' + /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple' + /usr/bin/install -c -m 644 examples/c/lexcalc/parse.y examples/c/lexcalc/scan.l examples/c/lexcalc/Makefile examples/c/lexcalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' + /usr/bin/install -c -m 644 examples/c/mfcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' + /usr/bin/install -c -m 644 data/README.md data/bison-default.css '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' /usr/bin/mkdir -p doc + /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons' + /usr/bin/install -c -m 644 examples/c/pushcalc/calc.y examples/c/pushcalc/Makefile examples/c/pushcalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt' +LC_ALL=C tests/bison --version >doc/bison.help.tmp + /usr/bin/install -c -m 644 examples/c/reccalc/parse.y examples/c/reccalc/scan.l examples/c/reccalc/Makefile examples/c/reccalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' + /usr/bin/install -c -m 644 examples/c/rpcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' + /usr/bin/install -c -m 644 examples/d/simple/calc.y examples/d/simple/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' /usr/bin/install -c -m 644 data/skeletons/bison.m4 data/skeletons/c++-skel.m4 data/skeletons/c++.m4 data/skeletons/c-like.m4 data/skeletons/c-skel.m4 data/skeletons/c.m4 data/skeletons/glr.c data/skeletons/glr.cc data/skeletons/glr2.cc data/skeletons/java-skel.m4 data/skeletons/java.m4 data/skeletons/lalr1.cc data/skeletons/lalr1.java data/skeletons/location.cc data/skeletons/stack.hh data/skeletons/traceon.m4 data/skeletons/variant.hh data/skeletons/yacc.c data/skeletons/d-skel.m4 data/skeletons/d.m4 data/skeletons/lalr1.d '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons' /usr/bin/install -c -m 644 data/xslt/bison.xsl data/xslt/xml2dot.xsl data/xslt/xml2text.xsl data/xslt/xml2xhtml.xsl '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt' -LC_ALL=C tests/bison --version >doc/bison.help.tmp - /usr/bin/install -c -m 644 examples/c/rpcalc/rpcalc.y 
'/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' /usr/bin/install -c -m 644 examples/c/mfcalc/calc.h examples/c/mfcalc/mfcalc.y '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' LC_ALL=C tests/bison --help | \ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp + /usr/bin/install -c -m 644 examples/c/rpcalc/rpcalc.y '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/man/man1' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/info' @@ -271803,117 +271839,117 @@ dh_perl dh_link dh_strip_nondeterminism - Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/cs/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison.mo - Normalized 
debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ky/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ast/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo + 
Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/ta/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ky/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo -Garbage at end of string in strptime: +02:00 at /usr/lib/x86_64-linux-gnu/perl/5.40/Time/Piece.pm line 598. - Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/sq/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/cs/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/th/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/rw/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/lv/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-runtime.mo + Normalized 
debian/bison/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/af/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-runtime.mo +Garbage at end of string in strptime: +02:00 at /usr/lib/x86_64-linux-gnu/perl/5.40/Time/Piece.pm line 598. Perhaps a format flag did not match the actual input? at /usr/lib/x86_64-linux-gnu/perl/5.40/Time/Piece.pm line 598. - Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/sk/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/sq/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/lv/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ast/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison.mo - Normalized 
debian/bison/usr/share/locale/af/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/rw/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/th/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo dh_compress dh_fixperms dh_missing @@ -271925,9 +271961,9 @@ dh_gencontrol dh_md5sums dh_builddeb +dpkg-deb: building package 'libbison-dev' in '../libbison-dev_3.8.2+dfsg-1_amd64.deb'. dpkg-deb: building package 'bison' in '../bison_3.8.2+dfsg-1_amd64.deb'. dpkg-deb: building package 'bison-dbgsym' in '../bison-dbgsym_3.8.2+dfsg-1_amd64.deb'. -dpkg-deb: building package 'libbison-dev' in '../libbison-dev_3.8.2+dfsg-1_amd64.deb'. dpkg-genbuildinfo --build=binary -O../bison_3.8.2+dfsg-1_amd64.buildinfo dpkg-genchanges --build=binary -O../bison_3.8.2+dfsg-1_amd64.changes dpkg-genchanges: info: binary-only upload (no source code included) @@ -271935,12 +271971,14 @@ dpkg-buildpackage: info: binary-only upload (no source included) dpkg-genchanges: info: including full source code in upload I: copying local configuration +I: user script /srv/workspace/pbuilder/629641/tmp/hooks/B01_cleanup starting +I: user script /srv/workspace/pbuilder/629641/tmp/hooks/B01_cleanup finished I: unmounting dev/ptmx filesystem I: unmounting dev/pts filesystem I: unmounting dev/shm filesystem I: unmounting proc filesystem I: unmounting sys filesystem I: cleaning the build env -I: removing directory /srv/workspace/pbuilder/3563770 and its subdirectories -I: Current time: Tue Sep 9 01:33:41 -12 2025 -I: pbuilder-time-stamp: 1757424821 +I: removing directory /srv/workspace/pbuilder/629641 and its subdirectories +I: Current time: Tue Oct 13 10:03:37 +14 2026 +I: pbuilder-time-stamp: 1791835417